Skip to content
Snippets Groups Projects

Compare revisions

Changes are shown as if the source revision was being merged into the target revision. Learn more about comparing revisions.

Source

Select target project
No results found

Target

Select target project
  • toolbox/WindEnergyToolbox
  • tlbl/WindEnergyToolbox
  • cpav/WindEnergyToolbox
  • frza/WindEnergyToolbox
  • borg/WindEnergyToolbox
  • mmpe/WindEnergyToolbox
  • ozgo/WindEnergyToolbox
  • dave/WindEnergyToolbox
  • mmir/WindEnergyToolbox
  • wluo/WindEnergyToolbox
  • welad/WindEnergyToolbox
  • chpav/WindEnergyToolbox
  • rink/WindEnergyToolbox
  • shfe/WindEnergyToolbox
  • shfe1/WindEnergyToolbox
  • acdi/WindEnergyToolbox
  • angl/WindEnergyToolbox
  • wliang/WindEnergyToolbox
  • mimc/WindEnergyToolbox
  • wtlib/WindEnergyToolbox
  • cmos/WindEnergyToolbox
  • fabpi/WindEnergyToolbox
22 results
Show changes
Showing
with 668 additions and 1438 deletions
...@@ -6,200 +6,221 @@ ...@@ -6,200 +6,221 @@
#PBS -e ./pbs_out_chunks/remote_chnk_00000.err #PBS -e ./pbs_out_chunks/remote_chnk_00000.err
#PBS -W umask=0003 #PBS -W umask=0003
### Maximum wallclock time format HOURS:MINUTES:SECONDS ### Maximum wallclock time format HOURS:MINUTES:SECONDS
#PBS -l walltime=20:00:00 #PBS -l walltime=09:00:00
#PBS -l nodes=1:ppn=20 #PBS -l nodes=1:ppn=20
### Queue name ### Queue name
#PBS -q workq #PBS -q workq
echo "----------------------------------------------------------------------" echo "----------------------------------------------------------------------"
echo "activate python environment wetb_py3" echo "activate python environment py36-wetb"
source /home/python/miniconda3/bin/activate wetb_py3 source /home/python/miniconda3/bin/activate py36-wetb
echo "CHECK 2x IF wetb_py3 IS ACTIVE, IF NOT TRY AGAIN" echo "CHECK 2x IF py36-wetb IS ACTIVE, IF NOT TRY AGAIN"
CMD="from distutils.sysconfig import get_python_lib;print (get_python_lib().find('/usr/lib/python'))" CMD="from distutils.sysconfig import get_python_lib;print (get_python_lib().find('/usr/lib/python'))"
ACTIVATED=`python -c "$CMD"` ACTIVATED=`python -c "$CMD"`
if [ $ACTIVATED -eq 0 ]; then source /home/python/miniconda3/bin/activate wetb_py3;fi if [ $ACTIVATED -eq 0 ]; then source /home/python/miniconda3/bin/activate py36-wetb;fi
ACTIVATED=`python -c "$CMD"` ACTIVATED=`python -c "$CMD"`
if [ $ACTIVATED -eq 0 ]; then source /home/python/miniconda3/bin/activate wetb_py3;fi if [ $ACTIVATED -eq 0 ]; then source /home/python/miniconda3/bin/activate py36-wetb;fi
echo "----------------------------------------------------------------------" echo "----------------------------------------------------------------------"
cd /scratch/$USER/$PBS_JOBID/ cd "/scratch/$USER/$PBS_JOBID/"
echo 'current working directory:' echo 'current working directory:'
pwd pwd
echo "create CPU directories on the scratch disk" echo "create CPU directories on the scratch disk"
mkdir -p /scratch/$USER/$PBS_JOBID/remote/ mkdir -p "/scratch/$USER/$PBS_JOBID/remote/"
mkdir -p /scratch/$USER/$PBS_JOBID/0/ mkdir -p "/scratch/$USER/$PBS_JOBID/0/"
mkdir -p /scratch/$USER/$PBS_JOBID/1/ mkdir -p "/scratch/$USER/$PBS_JOBID/1/"
mkdir -p /scratch/$USER/$PBS_JOBID/2/ mkdir -p "/scratch/$USER/$PBS_JOBID/2/"
mkdir -p /scratch/$USER/$PBS_JOBID/3/ mkdir -p "/scratch/$USER/$PBS_JOBID/3/"
mkdir -p /scratch/$USER/$PBS_JOBID/4/ mkdir -p "/scratch/$USER/$PBS_JOBID/4/"
mkdir -p /scratch/$USER/$PBS_JOBID/5/ mkdir -p "/scratch/$USER/$PBS_JOBID/5/"
mkdir -p /scratch/$USER/$PBS_JOBID/6/ mkdir -p "/scratch/$USER/$PBS_JOBID/6/"
mkdir -p /scratch/$USER/$PBS_JOBID/7/ mkdir -p "/scratch/$USER/$PBS_JOBID/7/"
mkdir -p /scratch/$USER/$PBS_JOBID/8/ mkdir -p "/scratch/$USER/$PBS_JOBID/8/"
mkdir -p /scratch/$USER/$PBS_JOBID/9/ mkdir -p "/scratch/$USER/$PBS_JOBID/9/"
mkdir -p /scratch/$USER/$PBS_JOBID/10/ mkdir -p "/scratch/$USER/$PBS_JOBID/10/"
mkdir -p /scratch/$USER/$PBS_JOBID/11/ mkdir -p "/scratch/$USER/$PBS_JOBID/11/"
mkdir -p /scratch/$USER/$PBS_JOBID/12/ mkdir -p "/scratch/$USER/$PBS_JOBID/12/"
mkdir -p /scratch/$USER/$PBS_JOBID/13/ mkdir -p "/scratch/$USER/$PBS_JOBID/13/"
mkdir -p /scratch/$USER/$PBS_JOBID/14/ mkdir -p "/scratch/$USER/$PBS_JOBID/14/"
mkdir -p /scratch/$USER/$PBS_JOBID/15/ mkdir -p "/scratch/$USER/$PBS_JOBID/15/"
mkdir -p /scratch/$USER/$PBS_JOBID/16/ mkdir -p "/scratch/$USER/$PBS_JOBID/16/"
mkdir -p /scratch/$USER/$PBS_JOBID/17/
mkdir -p /scratch/$USER/$PBS_JOBID/18/
mkdir -p /scratch/$USER/$PBS_JOBID/19/
echo "----------------------------------------------------------------------" echo "----------------------------------------------------------------------"
cd $PBS_O_WORKDIR cd $PBS_O_WORKDIR
echo 'current working directory:' echo 'current working directory:'
pwd pwd
echo "get the zip-chunk file from the PBS_O_WORKDIR" echo "get the zip-chunk file from the PBS_O_WORKDIR"
cp ./zip-chunks-jess/remote_chnk_00000.zip /scratch/$USER/$PBS_JOBID/ cp "zip-chunks-jess/remote_chnk_00000.zip" "/scratch/$USER/$PBS_JOBID/"
echo "----------------------------------------------------------------------" echo "----------------------------------------------------------------------"
cd /scratch/$USER/$PBS_JOBID/ cd "/scratch/$USER/$PBS_JOBID/"
echo 'current working directory:' echo 'current working directory:'
pwd pwd
echo "unzip chunk, create dirs in cpu and sim_id folders" echo "unzip chunk, create dirs in cpu and sim_id folders"
/usr/bin/unzip remote_chnk_00000.zip -d 0/. >> /dev/null /usr/bin/unzip "remote_chnk_00000.zip" -d "0/." >> /dev/null
/usr/bin/unzip remote_chnk_00000.zip -d 1/. >> /dev/null /usr/bin/unzip "remote_chnk_00000.zip" -d "1/." >> /dev/null
/usr/bin/unzip remote_chnk_00000.zip -d 2/. >> /dev/null /usr/bin/unzip "remote_chnk_00000.zip" -d "2/." >> /dev/null
/usr/bin/unzip remote_chnk_00000.zip -d 3/. >> /dev/null /usr/bin/unzip "remote_chnk_00000.zip" -d "3/." >> /dev/null
/usr/bin/unzip remote_chnk_00000.zip -d 4/. >> /dev/null /usr/bin/unzip "remote_chnk_00000.zip" -d "4/." >> /dev/null
/usr/bin/unzip remote_chnk_00000.zip -d 5/. >> /dev/null /usr/bin/unzip "remote_chnk_00000.zip" -d "5/." >> /dev/null
/usr/bin/unzip remote_chnk_00000.zip -d 6/. >> /dev/null /usr/bin/unzip "remote_chnk_00000.zip" -d "6/." >> /dev/null
/usr/bin/unzip remote_chnk_00000.zip -d 7/. >> /dev/null /usr/bin/unzip "remote_chnk_00000.zip" -d "7/." >> /dev/null
/usr/bin/unzip remote_chnk_00000.zip -d 8/. >> /dev/null /usr/bin/unzip "remote_chnk_00000.zip" -d "8/." >> /dev/null
/usr/bin/unzip remote_chnk_00000.zip -d 9/. >> /dev/null /usr/bin/unzip "remote_chnk_00000.zip" -d "9/." >> /dev/null
/usr/bin/unzip remote_chnk_00000.zip -d 10/. >> /dev/null /usr/bin/unzip "remote_chnk_00000.zip" -d "10/." >> /dev/null
/usr/bin/unzip remote_chnk_00000.zip -d 11/. >> /dev/null /usr/bin/unzip "remote_chnk_00000.zip" -d "11/." >> /dev/null
/usr/bin/unzip remote_chnk_00000.zip -d 12/. >> /dev/null /usr/bin/unzip "remote_chnk_00000.zip" -d "12/." >> /dev/null
/usr/bin/unzip remote_chnk_00000.zip -d 13/. >> /dev/null /usr/bin/unzip "remote_chnk_00000.zip" -d "13/." >> /dev/null
/usr/bin/unzip remote_chnk_00000.zip -d 14/. >> /dev/null /usr/bin/unzip "remote_chnk_00000.zip" -d "14/." >> /dev/null
/usr/bin/unzip remote_chnk_00000.zip -d 15/. >> /dev/null /usr/bin/unzip "remote_chnk_00000.zip" -d "15/." >> /dev/null
/usr/bin/unzip remote_chnk_00000.zip -d 16/. >> /dev/null /usr/bin/unzip "remote_chnk_00000.zip" -d "16/." >> /dev/null
/usr/bin/unzip remote_chnk_00000.zip -d 17/. >> /dev/null /usr/bin/unzip "remote_chnk_00000.zip" -d "remote/." >> /dev/null
/usr/bin/unzip remote_chnk_00000.zip -d 18/. >> /dev/null
/usr/bin/unzip remote_chnk_00000.zip -d 19/. >> /dev/null
/usr/bin/unzip remote_chnk_00000.zip -d remote/. >> /dev/null
echo "----------------------------------------------------------------------" echo "----------------------------------------------------------------------"
cd /scratch/$USER/$PBS_JOBID/remote/ cd "/scratch/$USER/$PBS_JOBID/remote/"
echo 'current working directory:' echo 'current working directory:'
pwd pwd
echo "create turb_db directories" echo "create turb_db directories"
mkdir -p ../turb/ mkdir -p "../turb/"
echo "----------------------------------------------------------------------" echo "----------------------------------------------------------------------"
cd $PBS_O_WORKDIR cd "$PBS_O_WORKDIR"
echo 'current working directory:' echo 'current working directory:'
pwd pwd
# copy to scratch db directory for [turb_db_dir], [turb_base_name] # copy to scratch db directory for [turb_db_dir], [turb_base_name]
cp ../turb/none* /scratch/$USER/$PBS_JOBID/remote/../turb/. cp "../turb/turb_s100_10ms"*.bin "/scratch/$USER/$PBS_JOBID/remote/../turb/."
cp ../turb/turb_s100_10ms* /scratch/$USER/$PBS_JOBID/remote/../turb/. cp "../turb/turb_s101_11ms"*.bin "/scratch/$USER/$PBS_JOBID/remote/../turb/."
cp ../turb/turb_s101_11ms* /scratch/$USER/$PBS_JOBID/remote/../turb/.
# copy to scratch db directory for [meand_db_dir], [meand_base_name] # copy to scratch db directory for [meander_db_dir], [meander_base_name]
# copy to scratch db directory for [wake_db_dir], [wake_base_name] # copy to scratch db directory for [micro_db_dir], [micro_base_name]
echo "----------------------------------------------------------------------" echo "----------------------------------------------------------------------"
cd /scratch/$USER/$PBS_JOBID/ cd "/scratch/$USER/$PBS_JOBID/"
echo 'current working directory:' echo 'current working directory:'
pwd pwd
echo "create turb directories in CPU dirs" echo "create turb directories in CPU dirs"
mkdir -p 0/turb/ mkdir -p "0/turb/"
mkdir -p 1/turb/ mkdir -p "0/turb_meander/"
mkdir -p 2/turb/ mkdir -p "0/turb_micro/"
mkdir -p 3/turb/ mkdir -p "1/turb/"
mkdir -p 4/turb/ mkdir -p "1/turb_meander/"
mkdir -p 5/turb/ mkdir -p "1/turb_micro/"
mkdir -p 6/turb/ mkdir -p "2/turb/"
mkdir -p 7/turb/ mkdir -p "2/turb_meander/"
mkdir -p 8/turb/ mkdir -p "2/turb_micro/"
mkdir -p 9/turb/ mkdir -p "3/turb/"
mkdir -p 10/turb/ mkdir -p "3/turb_meander/"
mkdir -p 11/turb/ mkdir -p "3/turb_micro/"
mkdir -p 12/turb/ mkdir -p "4/turb/"
mkdir -p 13/turb/ mkdir -p "4/turb_meander/"
mkdir -p 14/turb/ mkdir -p "4/turb_micro/"
mkdir -p 15/turb/ mkdir -p "5/turb/"
mkdir -p 16/turb/ mkdir -p "5/turb_meander/"
mkdir -p 17/turb/ mkdir -p "5/turb_micro/"
mkdir -p 18/turb/ mkdir -p "6/turb/"
mkdir -p 19/turb/ mkdir -p "6/turb_meander/"
mkdir -p "6/turb_micro/"
mkdir -p "7/turb/"
mkdir -p "7/turb_meander/"
mkdir -p "7/turb_micro/"
mkdir -p "8/turb/"
mkdir -p "8/turb_meander/"
mkdir -p "8/turb_micro/"
mkdir -p "9/turb/"
mkdir -p "9/turb_meander/"
mkdir -p "9/turb_micro/"
mkdir -p "10/turb/"
mkdir -p "10/turb_meander/"
mkdir -p "10/turb_micro/"
mkdir -p "11/turb/"
mkdir -p "11/turb_meander/"
mkdir -p "11/turb_micro/"
mkdir -p "12/turb/"
mkdir -p "12/turb_meander/"
mkdir -p "12/turb_micro/"
mkdir -p "13/turb/"
mkdir -p "13/turb_meander/"
mkdir -p "13/turb_micro/"
mkdir -p "14/turb/"
mkdir -p "14/turb_meander/"
mkdir -p "14/turb_micro/"
mkdir -p "15/turb/"
mkdir -p "15/turb_meander/"
mkdir -p "15/turb_micro/"
mkdir -p "16/turb/"
mkdir -p "16/turb_meander/"
mkdir -p "16/turb_micro/"
echo "----------------------------------------------------------------------" echo "----------------------------------------------------------------------"
cd /scratch/$USER/$PBS_JOBID/remote/ cd "/scratch/$USER/$PBS_JOBID/remote/"
echo 'current working directory:' echo 'current working directory:'
pwd pwd
echo "Link all turb files into CPU dirs" echo "Link all turb files into CPU dirs"
find /scratch/$USER/$PBS_JOBID/remote/../turb/ -iname "*.bin" -exec ln -s {} /scratch/$USER/$PBS_JOBID/0/turb/ \; find "/scratch/$USER/$PBS_JOBID/remote/../turb/" -iname "*.bin" -exec ln -s {} "/scratch/$USER/$PBS_JOBID/0/turb/" \;
find /scratch/$USER/$PBS_JOBID/remote/../turb/ -iname "*.bin" -exec ln -s {} /scratch/$USER/$PBS_JOBID/1/turb/ \; find "/scratch/$USER/$PBS_JOBID/remote/../turb/" -iname "*.bin" -exec ln -s {} "/scratch/$USER/$PBS_JOBID/1/turb/" \;
find /scratch/$USER/$PBS_JOBID/remote/../turb/ -iname "*.bin" -exec ln -s {} /scratch/$USER/$PBS_JOBID/2/turb/ \; find "/scratch/$USER/$PBS_JOBID/remote/../turb/" -iname "*.bin" -exec ln -s {} "/scratch/$USER/$PBS_JOBID/2/turb/" \;
find /scratch/$USER/$PBS_JOBID/remote/../turb/ -iname "*.bin" -exec ln -s {} /scratch/$USER/$PBS_JOBID/3/turb/ \; find "/scratch/$USER/$PBS_JOBID/remote/../turb/" -iname "*.bin" -exec ln -s {} "/scratch/$USER/$PBS_JOBID/3/turb/" \;
find /scratch/$USER/$PBS_JOBID/remote/../turb/ -iname "*.bin" -exec ln -s {} /scratch/$USER/$PBS_JOBID/4/turb/ \; find "/scratch/$USER/$PBS_JOBID/remote/../turb/" -iname "*.bin" -exec ln -s {} "/scratch/$USER/$PBS_JOBID/4/turb/" \;
find /scratch/$USER/$PBS_JOBID/remote/../turb/ -iname "*.bin" -exec ln -s {} /scratch/$USER/$PBS_JOBID/5/turb/ \; find "/scratch/$USER/$PBS_JOBID/remote/../turb/" -iname "*.bin" -exec ln -s {} "/scratch/$USER/$PBS_JOBID/5/turb/" \;
find /scratch/$USER/$PBS_JOBID/remote/../turb/ -iname "*.bin" -exec ln -s {} /scratch/$USER/$PBS_JOBID/6/turb/ \; find "/scratch/$USER/$PBS_JOBID/remote/../turb/" -iname "*.bin" -exec ln -s {} "/scratch/$USER/$PBS_JOBID/6/turb/" \;
find /scratch/$USER/$PBS_JOBID/remote/../turb/ -iname "*.bin" -exec ln -s {} /scratch/$USER/$PBS_JOBID/7/turb/ \; find "/scratch/$USER/$PBS_JOBID/remote/../turb/" -iname "*.bin" -exec ln -s {} "/scratch/$USER/$PBS_JOBID/7/turb/" \;
find /scratch/$USER/$PBS_JOBID/remote/../turb/ -iname "*.bin" -exec ln -s {} /scratch/$USER/$PBS_JOBID/8/turb/ \; find "/scratch/$USER/$PBS_JOBID/remote/../turb/" -iname "*.bin" -exec ln -s {} "/scratch/$USER/$PBS_JOBID/8/turb/" \;
find /scratch/$USER/$PBS_JOBID/remote/../turb/ -iname "*.bin" -exec ln -s {} /scratch/$USER/$PBS_JOBID/9/turb/ \; find "/scratch/$USER/$PBS_JOBID/remote/../turb/" -iname "*.bin" -exec ln -s {} "/scratch/$USER/$PBS_JOBID/9/turb/" \;
find /scratch/$USER/$PBS_JOBID/remote/../turb/ -iname "*.bin" -exec ln -s {} /scratch/$USER/$PBS_JOBID/10/turb/ \; find "/scratch/$USER/$PBS_JOBID/remote/../turb/" -iname "*.bin" -exec ln -s {} "/scratch/$USER/$PBS_JOBID/10/turb/" \;
find /scratch/$USER/$PBS_JOBID/remote/../turb/ -iname "*.bin" -exec ln -s {} /scratch/$USER/$PBS_JOBID/11/turb/ \; find "/scratch/$USER/$PBS_JOBID/remote/../turb/" -iname "*.bin" -exec ln -s {} "/scratch/$USER/$PBS_JOBID/11/turb/" \;
find /scratch/$USER/$PBS_JOBID/remote/../turb/ -iname "*.bin" -exec ln -s {} /scratch/$USER/$PBS_JOBID/12/turb/ \; find "/scratch/$USER/$PBS_JOBID/remote/../turb/" -iname "*.bin" -exec ln -s {} "/scratch/$USER/$PBS_JOBID/12/turb/" \;
find /scratch/$USER/$PBS_JOBID/remote/../turb/ -iname "*.bin" -exec ln -s {} /scratch/$USER/$PBS_JOBID/13/turb/ \; find "/scratch/$USER/$PBS_JOBID/remote/../turb/" -iname "*.bin" -exec ln -s {} "/scratch/$USER/$PBS_JOBID/13/turb/" \;
find /scratch/$USER/$PBS_JOBID/remote/../turb/ -iname "*.bin" -exec ln -s {} /scratch/$USER/$PBS_JOBID/14/turb/ \; find "/scratch/$USER/$PBS_JOBID/remote/../turb/" -iname "*.bin" -exec ln -s {} "/scratch/$USER/$PBS_JOBID/14/turb/" \;
find /scratch/$USER/$PBS_JOBID/remote/../turb/ -iname "*.bin" -exec ln -s {} /scratch/$USER/$PBS_JOBID/15/turb/ \; find "/scratch/$USER/$PBS_JOBID/remote/../turb/" -iname "*.bin" -exec ln -s {} "/scratch/$USER/$PBS_JOBID/15/turb/" \;
find /scratch/$USER/$PBS_JOBID/remote/../turb/ -iname "*.bin" -exec ln -s {} /scratch/$USER/$PBS_JOBID/16/turb/ \; find "/scratch/$USER/$PBS_JOBID/remote/../turb/" -iname "*.bin" -exec ln -s {} "/scratch/$USER/$PBS_JOBID/16/turb/" \;
find /scratch/$USER/$PBS_JOBID/remote/../turb/ -iname "*.bin" -exec ln -s {} /scratch/$USER/$PBS_JOBID/17/turb/ \;
find /scratch/$USER/$PBS_JOBID/remote/../turb/ -iname "*.bin" -exec ln -s {} /scratch/$USER/$PBS_JOBID/18/turb/ \;
find /scratch/$USER/$PBS_JOBID/remote/../turb/ -iname "*.bin" -exec ln -s {} /scratch/$USER/$PBS_JOBID/19/turb/ \;
echo "----------------------------------------------------------------------" echo "----------------------------------------------------------------------"
cd /scratch/$USER/$PBS_JOBID/ cd "/scratch/$USER/$PBS_JOBID/"
echo 'current working directory:' echo 'current working directory:'
pwd pwd
echo "START RUNNING JOBS IN find+xargs MODE" echo "START RUNNING JOBS IN find+xargs MODE"
WINEARCH=win32 WINEPREFIX=~/.wine32 winefix WINEARCH="win32" WINEPREFIX="$HOME/.wine32" winefix
# run all the PBS *.p files in find+xargs mode # run all the PBS *.p files in find+xargs mode
echo "following cases will be run from following path:" echo "following cases will be run from following path:"
echo "remote/pbs_in/dlc01_demos/" echo "remote/pbs_in/dlc01_demos/"
export LAUNCH_PBS_MODE=false export LAUNCH_PBS_MODE=false
/home/MET/sysalt/bin/find remote/pbs_in/dlc01_demos/ -type f -name '*.p' | sort -z /home/MET/sysalt/bin/find 'remote/pbs_in/dlc01_demos/' -type f -name '*.p' | sort -z
echo "number of files to be launched: "`find remote/pbs_in/dlc01_demos/ -type f | wc -l` echo "number of files to be launched: "`find "remote/pbs_in/dlc01_demos/" -type f | wc -l`
/home/MET/sysalt/bin/find remote/pbs_in/dlc01_demos/ -type f -name '*.p' -print0 | sort -z | /home/MET/sysalt/bin/xargs -0 -I{} --process-slot-var=CPU_NR -n 1 -P 20 sh {} /home/MET/sysalt/bin/find 'remote/pbs_in/dlc01_demos/' -type f -name '*.p' -print0 | sort -z | /home/MET/sysalt/bin/xargs -0 -I{} --process-slot-var=CPU_NR -n 1 -P 17 sh {}
echo "END OF JOBS IN find+xargs MODE" echo "END OF JOBS IN find+xargs MODE"
echo "----------------------------------------------------------------------" echo "----------------------------------------------------------------------"
echo 'total scratch disk usage:' echo "total scratch disk usage:"
du -hs /scratch/$USER/$PBS_JOBID/ du -hs "/scratch/$USER/$PBS_JOBID/"
cd /scratch/$USER/$PBS_JOBID/remote cd "/scratch/$USER/$PBS_JOBID/remote"
echo 'current working directory:' echo "current working directory:"
pwd pwd
echo "Results saved at sim_id directory:" echo "Results saved at sim_id directory:"
find . find .
echo "move statsdel into compressed archive" echo "move statsdel into compressed archive"
find res/dlc01_demos/ -name "*.csv" -print0 | xargs -0 tar --remove-files -rf prepost/statsdel_chnk_00000.tar find "res/dlc01_demos/" -name "*.csv" -print0 | xargs -0 tar --remove-files -rf "prepost/statsdel_chnk_00000.tar"
xz -z2 -T 20 prepost/statsdel_chnk_00000.tar xz -z2 -T 17 "prepost/statsdel_chnk_00000.tar"
echo "move log analysis into compressed archive" echo "move log analysis into compressed archive"
find logfiles/dlc01_demos/ -name "*.csv" -print0 | xargs -0 tar --remove-files -rf prepost/loganalysis_chnk_00000.tar find "logfiles/dlc01_demos/" -name "*.csv" -print0 | xargs -0 tar --remove-files -rf "prepost/loganalysis_chnk_00000.tar"
xz -z2 -T 20 prepost/loganalysis_chnk_00000.tar xz -z2 -T 17 "prepost/loganalysis_chnk_00000.tar"
echo "----------------------------------------------------------------------" echo "----------------------------------------------------------------------"
cd /scratch/$USER/$PBS_JOBID/ cd "/scratch/$USER/$PBS_JOBID/"
echo 'current working directory:' echo 'current working directory:'
pwd pwd
echo "move results back from node scratch/sim_id to origin, but ignore htc, and pbs_in directories." echo "move results back from node scratch/sim_id to origin, but ignore htc, and pbs_in directories."
echo "copy from remote/* to $PBS_O_WORKDIR/" echo "copy from remote/ to $PBS_O_WORKDIR/"
time rsync -au --remove-source-files remote/* $PBS_O_WORKDIR/ \ time rsync -au "remote/" "$PBS_O_WORKDIR/" \
--exclude pbs_in/dlc01_demos/* \ --exclude "pbs_in/dlc01_demos/*" \
--exclude *.htc --exclude *.htc
source deactivate source deactivate
echo "DONE !!" echo "DONE !!"
......
No preview for this file type
No preview for this file type
File deleted
File deleted
[seed],[wsp],[wave_seed],[wdir],[ReferenceTI],[ReferenceWindSpeed],[t0],[duration],[hub_height],[Case folder],[Duration],[Case id.],[Turb base name],[time_stop]
1001,4,101,10,0.12,44,100,100,116.8,DLC12,3600,DLC12_wsp04_wdir010_sa1001_sw0101,turb_wsp04_s1001,3700
1001,4,101,20,0.12,44,100,100,116.8,DLC12,3600,DLC12_wsp04_wdir020_sa1001_sw0101,turb_wsp04_s1001,3700
1001,4,102,10,0.12,44,100,100,116.8,DLC12,3600,DLC12_wsp04_wdir010_sa1001_sw0102,turb_wsp04_s1001,3700
2001,4,102,20,0.12,44,100,100,116.8,DLC12,3600,DLC12_wsp04_wdir020_sa2001_sw0102,turb_wsp04_s2001,3700
2001,4,103,10,0.12,44,100,100,116.8,DLC12,3600,DLC12_wsp04_wdir010_sa2001_sw0103,turb_wsp04_s2001,3700
2001,4,103,20,0.12,44,100,100,116.8,DLC12,3600,DLC12_wsp04_wdir020_sa2001_sw0103,turb_wsp04_s2001,3700
3002,6,201,10,0.12,44,100,100,116.8,DLC12,3600,DLC12_wsp06_wdir010_sa3002_sw0201,turb_wsp06_s3002,3700
3002,6,201,20,0.12,44,100,100,116.8,DLC12,3600,DLC12_wsp06_wdir020_sa3002_sw0201,turb_wsp06_s3002,3700
3002,6,202,10,0.12,44,100,100,116.8,DLC12,3600,DLC12_wsp06_wdir010_sa3002_sw0202,turb_wsp06_s3002,3700
4002,6,202,20,0.12,44,100,100,116.8,DLC12,3600,DLC12_wsp06_wdir020_sa4002_sw0202,turb_wsp06_s4002,3700
4002,6,203,10,0.12,44,100,100,116.8,DLC12,3600,DLC12_wsp06_wdir010_sa4002_sw0203,turb_wsp06_s4002,3700
4002,6,203,20,0.12,44,100,100,116.8,DLC12,3600,DLC12_wsp06_wdir020_sa4002_sw0203,turb_wsp06_s4002,3700
5003,8,301,10,0.12,44,100,100,116.8,DLC12,3600,DLC12_wsp08_wdir010_sa5003_sw0301,turb_wsp08_s5003,3700
5003,8,301,20,0.12,44,100,100,116.8,DLC12,3600,DLC12_wsp08_wdir020_sa5003_sw0301,turb_wsp08_s5003,3700
5003,8,302,10,0.12,44,100,100,116.8,DLC12,3600,DLC12_wsp08_wdir010_sa5003_sw0302,turb_wsp08_s5003,3700
6003,8,302,20,0.12,44,100,100,116.8,DLC12,3600,DLC12_wsp08_wdir020_sa6003_sw0302,turb_wsp08_s6003,3700
6003,8,303,10,0.12,44,100,100,116.8,DLC12,3600,DLC12_wsp08_wdir010_sa6003_sw0303,turb_wsp08_s6003,3700
6003,8,303,20,0.12,44,100,100,116.8,DLC12,3600,DLC12_wsp08_wdir020_sa6003_sw0303,turb_wsp08_s6003,3700
7001,4,101,10,0.12,44,100,100,116.8,DLC12,3600,DLC12_wsp04_wdir010_sa7001_sw0101,turb_wsp04_s7001,3700
7001,4,101,20,0.12,44,100,100,116.8,DLC12,3600,DLC12_wsp04_wdir020_sa7001_sw0101,turb_wsp04_s7001,3700
7001,4,102,10,0.12,44,100,100,116.8,DLC12,3600,DLC12_wsp04_wdir010_sa7001_sw0102,turb_wsp04_s7001,3700
8001,4,102,20,0.12,44,100,100,116.8,DLC12,3600,DLC12_wsp04_wdir020_sa8001_sw0102,turb_wsp04_s8001,3700
8001,4,103,10,0.12,44,100,100,116.8,DLC12,3600,DLC12_wsp04_wdir010_sa8001_sw0103,turb_wsp04_s8001,3700
8001,4,103,20,0.12,44,100,100,116.8,DLC12,3600,DLC12_wsp04_wdir020_sa8001_sw0103,turb_wsp04_s8001,3700
9002,6,201,10,0.12,44,100,100,116.8,DLC12,3600,DLC12_wsp06_wdir010_sa9002_sw0201,turb_wsp06_s9002,3700
9002,6,201,20,0.12,44,100,100,116.8,DLC12,3600,DLC12_wsp06_wdir020_sa9002_sw0201,turb_wsp06_s9002,3700
9002,6,202,10,0.12,44,100,100,116.8,DLC12,3600,DLC12_wsp06_wdir010_sa9002_sw0202,turb_wsp06_s9002,3700
10002,6,202,20,0.12,44,100,100,116.8,DLC12,3600,DLC12_wsp06_wdir020_sa10002_sw0202,turb_wsp06_s10002,3700
10002,6,203,10,0.12,44,100,100,116.8,DLC12,3600,DLC12_wsp06_wdir010_sa10002_sw0203,turb_wsp06_s10002,3700
10002,6,203,20,0.12,44,100,100,116.8,DLC12,3600,DLC12_wsp06_wdir020_sa10002_sw0203,turb_wsp06_s10002,3700
11003,8,301,10,0.12,44,100,100,116.8,DLC12,3600,DLC12_wsp08_wdir010_sa11003_sw0301,turb_wsp08_s11003,3700
11003,8,301,20,0.12,44,100,100,116.8,DLC12,3600,DLC12_wsp08_wdir020_sa11003_sw0301,turb_wsp08_s11003,3700
11003,8,302,10,0.12,44,100,100,116.8,DLC12,3600,DLC12_wsp08_wdir010_sa11003_sw0302,turb_wsp08_s11003,3700
12003,8,302,20,0.12,44,100,100,116.8,DLC12,3600,DLC12_wsp08_wdir020_sa12003_sw0302,turb_wsp08_s12003,3700
12003,8,303,10,0.12,44,100,100,116.8,DLC12,3600,DLC12_wsp08_wdir010_sa12003_sw0303,turb_wsp08_s12003,3700
12003,8,303,20,0.12,44,100,100,116.8,DLC12,3600,DLC12_wsp08_wdir020_sa12003_sw0303,turb_wsp08_s12003,3700
[seed],[wave_seed],[wsp],[wdir],[ReferenceTI],[ReferenceWindSpeed],[t0],[duration],[hub_height],[Case folder],[Duration],[Case id.],[Turb base name],[time_stop]
1001,101,4,0,0.12,44,100,100,116.8,DLC13,3600,DLC13_wsp04_wdir000_sa1001_sw0101,turb_wsp04_s1001,3700
1001,101,4,20,0.12,44,100,100,116.8,DLC13,3600,DLC13_wsp04_wdir020_sa1001_sw0101,turb_wsp04_s1001,3700
1002,201,6,0,0.12,44,100,100,116.8,DLC13,3600,DLC13_wsp06_wdir000_sa1002_sw0201,turb_wsp06_s1002,3700
2002,201,6,20,0.12,44,100,100,116.8,DLC13,3600,DLC13_wsp06_wdir020_sa2002_sw0201,turb_wsp06_s2002,3700
2003,301,8,0,0.12,44,100,100,116.8,DLC13,3600,DLC13_wsp08_wdir000_sa2003_sw0301,turb_wsp08_s2003,3700
2003,301,8,20,0.12,44,100,100,116.8,DLC13,3600,DLC13_wsp08_wdir020_sa2003_sw0301,turb_wsp08_s2003,3700
3001,102,4,0,0.12,44,100,100,116.8,DLC13,3600,DLC13_wsp04_wdir000_sa3001_sw0102,turb_wsp04_s3001,3700
3001,102,4,20,0.12,44,100,100,116.8,DLC13,3600,DLC13_wsp04_wdir020_sa3001_sw0102,turb_wsp04_s3001,3700
3002,202,6,0,0.12,44,100,100,116.8,DLC13,3600,DLC13_wsp06_wdir000_sa3002_sw0202,turb_wsp06_s3002,3700
4002,202,6,20,0.12,44,100,100,116.8,DLC13,3600,DLC13_wsp06_wdir020_sa4002_sw0202,turb_wsp06_s4002,3700
4003,302,8,0,0.12,44,100,100,116.8,DLC13,3600,DLC13_wsp08_wdir000_sa4003_sw0302,turb_wsp08_s4003,3700
4003,302,8,20,0.12,44,100,100,116.8,DLC13,3600,DLC13_wsp08_wdir020_sa4003_sw0302,turb_wsp08_s4003,3700
5001,101,4,0,0.12,44,100,100,116.8,DLC13,3600,DLC13_wsp04_wdir000_sa5001_sw0101,turb_wsp04_s5001,3700
5001,101,4,20,0.12,44,100,100,116.8,DLC13,3600,DLC13_wsp04_wdir020_sa5001_sw0101,turb_wsp04_s5001,3700
5002,201,6,0,0.12,44,100,100,116.8,DLC13,3600,DLC13_wsp06_wdir000_sa5002_sw0201,turb_wsp06_s5002,3700
6002,201,6,20,0.12,44,100,100,116.8,DLC13,3600,DLC13_wsp06_wdir020_sa6002_sw0201,turb_wsp06_s6002,3700
6003,301,8,0,0.12,44,100,100,116.8,DLC13,3600,DLC13_wsp08_wdir000_sa6003_sw0301,turb_wsp08_s6003,3700
6003,301,8,20,0.12,44,100,100,116.8,DLC13,3600,DLC13_wsp08_wdir020_sa6003_sw0301,turb_wsp08_s6003,3700
7001,102,4,0,0.12,44,100,100,116.8,DLC13,3600,DLC13_wsp04_wdir000_sa7001_sw0102,turb_wsp04_s7001,3700
7001,102,4,20,0.12,44,100,100,116.8,DLC13,3600,DLC13_wsp04_wdir020_sa7001_sw0102,turb_wsp04_s7001,3700
7002,202,6,0,0.12,44,100,100,116.8,DLC13,3600,DLC13_wsp06_wdir000_sa7002_sw0202,turb_wsp06_s7002,3700
8002,202,6,20,0.12,44,100,100,116.8,DLC13,3600,DLC13_wsp06_wdir020_sa8002_sw0202,turb_wsp06_s8002,3700
8003,302,8,0,0.12,44,100,100,116.8,DLC13,3600,DLC13_wsp08_wdir000_sa8003_sw0302,turb_wsp08_s8003,3700
8003,302,8,20,0.12,44,100,100,116.8,DLC13,3600,DLC13_wsp08_wdir020_sa8003_sw0302,turb_wsp08_s8003,3700
# identifiers for HawcStab2 modes of DTU10MW B0002 with HS2 2-15
# mode_number ; description
1 ;
2 ; 1st Tower FA
3 ; 1st Tower SS
4 ; 1st BF B whirling
5 ; 1st BF collective
6 ; 1st BF F whirling # with yawing
7 ; 1st BE B whirling
8 ; 1st BE F whirling
9 ; 2nd BF B whirling # with yawing
10 ; 2nd BF F whirling
11 ; 2nd BF collective
12 ; 1st shaft / BE collective
13 ; 2nd Tower FA
14 ; 2nd Tower SS
15 ; Tower torsion
16 ; 16
17 ; 17
18 ; 18
19 ; 19
20 ; 20
21 ; 21
22 ; 22
23 ; 23
24 ; 24
25 ; 25
import unittest
import os
#import shutil
import numpy as np
import pandas as pd
from wetb.prepost.GenerateDLCs import GenerateDLCCases
class Template(unittest.TestCase):
    """Shared base class for the test cases in this module.

    Resolves the directory containing this test file so subclasses can
    locate their data fixtures relative to it.
    """

    def setUp(self):
        # Head of (head, tail) split == the directory this module lives in.
        self.basepath = os.path.split(__file__)[0]
class TestGenerateDLCCases(Template):
    """Integration test: generate DLC case spreadsheets from a master
    Excel file and compare the result with checked-in reference CSVs."""

    def test_dlcs(self):
        # manually configure paths, HAWC2 model root path is then constructed as
        # p_root_remote/PROJECT/sim_id, and p_root_local/PROJECT/sim_id
        # adopt accordingly when you have configured your directories differently
        root = os.path.join(self.basepath, 'data/demo_gendlc/')
        # project name, sim_id, master file name
        master_fname = os.path.join(root, 'DLCs.xlsx')
        out_folder = os.path.join(root, 'DLCs')
        fname_gen12 = os.path.join(out_folder, 'DLC12.xlsx')
        fname_gen13 = os.path.join(out_folder, 'DLC13.xlsx')

        # generate the per-DLC spreadsheets from the master file
        dlb = GenerateDLCCases()
        dlb.execute(filename=master_fname, folder=out_folder)

        # DLC12: generated table must equal the reference exactly
        df12 = pd.read_excel(fname_gen12)
        # df12.to_csv('data/demo_gendlc/ref/DLC12.csv', index=False)
        df12_ref = pd.read_csv(os.path.join(root, 'ref/DLC12.csv'))
        # df12_ref2 = pd.read_excel(p2)[df12.columns]
        pd.testing.assert_frame_equal(df12, df12_ref)

        # df2 = df[['[Case id.]', '[wdir]', '[wsp]', '[seed]', '[wave_seed]']]
        # spot-check a few generated columns and the case-id naming scheme
        self.assertEqual(df12['[ReferenceWindSpeed]'].unique(), np.array([44]))
        self.assertEqual(df12['[t0]'].unique(), np.array([100]))
        self.assertEqual(len(df12['[Case id.]'].unique()), 2*3*3*2)
        self.assertEqual(df12['[Case id.]'].values[0],
                         'DLC12_wsp04_wdir010_sa1001_sw0101')

        # DLC13: generated table must equal the reference exactly
        df13 = pd.read_excel(fname_gen13)
        # df13.to_csv('data/demo_gendlc/ref/DLC13.csv', index=False)
        df13_ref = pd.read_csv(os.path.join(root, 'ref/DLC13.csv'))
        pd.testing.assert_frame_equal(df13, df13_ref)
# Allow running this test module directly (python <this file>) in addition
# to discovery via a test runner.
if __name__ == "__main__":
    unittest.main()
...@@ -3,13 +3,6 @@ Created on 05/11/2015 ...@@ -3,13 +3,6 @@ Created on 05/11/2015
@author: MMPE @author: MMPE
''' '''
from __future__ import unicode_literals
from __future__ import print_function
from __future__ import division
from __future__ import absolute_import
from future import standard_library
standard_library.install_aliases()
import unittest import unittest
import os import os
import filecmp import filecmp
...@@ -63,10 +56,10 @@ class TestGenerateInputs(Template): ...@@ -63,10 +56,10 @@ class TestGenerateInputs(Template):
shutil.rmtree(os.path.join(p_root, tmpl.PROJECT, 'remote')) shutil.rmtree(os.path.join(p_root, tmpl.PROJECT, 'remote'))
tmpl.force_dir = tmpl.P_RUN tmpl.force_dir = tmpl.P_RUN
tmpl.launch_dlcs_excel('remote', silent=True, runmethod='gorm', tmpl.launch_dlcs_excel('remote', silent=True, runmethod='pbs',
pbs_turb=True, zipchunks=True, pbs_turb=True, zipchunks=True, ppn=17,
postpro_node_zipchunks=False, postpro_node_zipchunks=False,
postpro_node=False) postpro_node=False, update_model_data=True)
def cmp_dir(dir1, dir2): def cmp_dir(dir1, dir2):
lst1, lst2 = map(os.listdir, (dir1, dir2)) lst1, lst2 = map(os.listdir, (dir1, dir2))
...@@ -98,12 +91,11 @@ class TestGenerateInputs(Template): ...@@ -98,12 +91,11 @@ class TestGenerateInputs(Template):
print() print()
raise raise
# we can not git check-in empty dirs so we can not compare the complete # we can not git check-in empty dirs so we can not compare the complete
# directory structure without manually creating the empty dirs here # directory structure without manually creating the empty dirs here
for subdir in ['control', 'data', 'htc', 'pbs_in', 'pbs_in_turb', for subdir in ['control', 'data', 'htc', 'pbs_in', 'pbs_in_turb',
'htc/_master', 'htc/dlc01_demos', 'pbs_in/dlc01_demos', 'htc/_master', 'htc/dlc01_demos', 'pbs_in/dlc01_demos',
'zip-chunks-gorm', 'zip-chunks-jess']: 'zip-chunks-jess']:
remote = os.path.join(p_root, tmpl.PROJECT, 'remote', subdir) remote = os.path.join(p_root, tmpl.PROJECT, 'remote', subdir)
ref = os.path.join(p_root, tmpl.PROJECT, 'ref', subdir) ref = os.path.join(p_root, tmpl.PROJECT, 'ref', subdir)
# the zipfiles are taken care of separately # the zipfiles are taken care of separately
...@@ -119,7 +111,6 @@ class TestGenerateInputs(Template): ...@@ -119,7 +111,6 @@ class TestGenerateInputs(Template):
# compare the zip files # compare the zip files
for fname in ['demo_dlc_remote.zip', for fname in ['demo_dlc_remote.zip',
'zip-chunks-gorm/remote_chnk_00000.zip',
'zip-chunks-jess/remote_chnk_00000.zip']: 'zip-chunks-jess/remote_chnk_00000.zip']:
remote = os.path.join(p_root, tmpl.PROJECT, 'remote', fname) remote = os.path.join(p_root, tmpl.PROJECT, 'remote', fname)
ref = os.path.join(p_root, tmpl.PROJECT, 'ref', fname) ref = os.path.join(p_root, tmpl.PROJECT, 'ref', fname)
......
...@@ -3,20 +3,15 @@ Created on 05/11/2015 ...@@ -3,20 +3,15 @@ Created on 05/11/2015
@author: MMPE @author: MMPE
''' '''
from __future__ import unicode_literals
from __future__ import print_function
from __future__ import division
from __future__ import absolute_import
from future import standard_library
standard_library.install_aliases()
import unittest import unittest
from os.path import join as pjoin from os.path import join as pjoin
from os.path import dirname as pdirname from os.path import dirname as pdirname
import numpy as np import numpy as np
from wetb.prepost.hawcstab2 import results, ReadControlTuning from wetb.prepost.hawcstab2 import (results, ReadControlTuning, read_cmb_all,
read_modid, PlotCampbell, plot_add_ps)
from wetb.prepost.mplutils import subplots
class Tests(unittest.TestCase): class Tests(unittest.TestCase):
...@@ -24,13 +19,12 @@ class Tests(unittest.TestCase): ...@@ -24,13 +19,12 @@ class Tests(unittest.TestCase):
""" """
def setUp(self): def setUp(self):
self.fpath_linear = pjoin(pdirname(__file__), self.fbase = pdirname(__file__)
'data/controller_input_linear.txt') self.fpath_linear = pjoin(self.fbase, 'data/controller_input_linear.txt')
self.fpath_quad = pjoin(pdirname(__file__), self.fpath_quad = pjoin(self.fbase, 'data/controller_input_quadratic.txt')
'data/controller_input_quadratic.txt')
def test_cmb_df(self): def test_cmb_df(self):
fname1 = pjoin(pdirname(__file__), 'data/campbell_diagram.cmb') fname1 = pjoin(self.fbase, 'data/campbell_diagram.cmb')
speed, freq, damp, real_eig = results().load_cmb(fname1) speed, freq, damp, real_eig = results().load_cmb(fname1)
self.assertIsNone(real_eig) self.assertIsNone(real_eig)
...@@ -40,7 +34,7 @@ class Tests(unittest.TestCase): ...@@ -40,7 +34,7 @@ class Tests(unittest.TestCase):
ops = freq.shape[0] ops = freq.shape[0]
self.assertEqual(len(speed), ops) self.assertEqual(len(speed), ops)
self.assertEqual(ops, 22) self.assertEqual(ops, 21)
self.assertEqual(mods, 10) self.assertEqual(mods, 10)
for k in range(ops): for k in range(ops):
...@@ -51,6 +45,30 @@ class Tests(unittest.TestCase): ...@@ -51,6 +45,30 @@ class Tests(unittest.TestCase):
self.assertEqual(len(df_oper['wind_ms'].unique()), 1) self.assertEqual(len(df_oper['wind_ms'].unique()), 1)
self.assertEqual(df_oper['wind_ms'].unique()[0], speed[k]) self.assertEqual(df_oper['wind_ms'].unique()[0], speed[k])
def test_read_cmb_all(self):
f_pwr = pjoin(self.fbase, 'data/dtu10mw_v1.pwr')
f_cmb = pjoin(self.fbase, 'data/campbell_diagram.cmb')
f_modid = pjoin(self.fbase, 'data/dtu10mw.modid')
dfp, dff, dfd = read_cmb_all(f_cmb, f_pwr=f_pwr, f_modid=f_modid)
self.assertEqual(dfp.shape, (21, 27))
self.assertEqual(dff.shape, (21, 10))
self.assertEqual(dfd.shape, (21, 10))
dfp, dff, dfd = read_cmb_all(f_cmb, f_pwr=None)
self.assertIsNone(dfp)
def test_read_modid(self):
fname = pjoin(self.fbase, 'data/dtu10mw.modid')
modes = read_modid(fname)
ref = ['', '1st Tower FA', '1st Tower SS', '1st BF B whirling',
'1st BF collective', '1st BF F whirling', '1st BE B whirling',
'1st BE F whirling', '2nd BF B whirling', '2nd BF F whirling',
'2nd BF collective', '1st shaft / BE collective',
'2nd Tower FA', '2nd Tower SS', 'Tower torsion']
self.assertEqual(len(modes), 25)
self.assertEqual(modes[:15], ref)
def test_linear_file(self): def test_linear_file(self):
hs2 = ReadControlTuning() hs2 = ReadControlTuning()
...@@ -138,7 +156,7 @@ class Tests(unittest.TestCase): ...@@ -138,7 +156,7 @@ class Tests(unittest.TestCase):
] ]
for fname in fnames: for fname in fnames:
fname = pjoin(pdirname(__file__), 'data', fname) fname = pjoin(self.fbase, 'data', fname)
res = results() res = results()
df_data = res.load_ind(fname) df_data = res.load_ind(fname)
data = np.loadtxt(fname) data = np.loadtxt(fname)
...@@ -150,7 +168,7 @@ class Tests(unittest.TestCase): ...@@ -150,7 +168,7 @@ class Tests(unittest.TestCase):
'dtu10mw_nogradient_v2.pwr', 'dtu10mw_nogradient_v2.pwr',
'dtu10mw_v1.pwr',] 'dtu10mw_v1.pwr',]
for fname in fnames: for fname in fnames:
fname = pjoin(pdirname(__file__), 'data', fname) fname = pjoin(self.fbase, 'data', fname)
res = results() res = results()
df_data, units = res.load_pwr_df(fname) df_data, units = res.load_pwr_df(fname)
data = np.loadtxt(fname) data = np.loadtxt(fname)
...@@ -161,7 +179,7 @@ class Tests(unittest.TestCase): ...@@ -161,7 +179,7 @@ class Tests(unittest.TestCase):
res = results() res = results()
fname = pjoin(pdirname(__file__), 'data', 'dtu10mw.opt') fname = pjoin(self.fbase, 'data', 'dtu10mw.opt')
df = res.load_operation(fname) df = res.load_operation(fname)
tmp = [5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, tmp = [5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20,
21, 22, 23, 24, 25] 21, 22, 23, 24, 25]
...@@ -184,6 +202,21 @@ class Tests(unittest.TestCase): ...@@ -184,6 +202,21 @@ class Tests(unittest.TestCase):
np.testing.assert_allclose(tmp, df['P_aero'].values) np.testing.assert_allclose(tmp, df['P_aero'].values)
self.assertEqual(df.values.shape, (22, 5)) self.assertEqual(df.values.shape, (22, 5))
def test_plot_cmb(self):
base = pdirname(__file__)
f_pwr = pjoin(base, 'data/dtu10mw_v1.pwr')
f_cmb = pjoin(base, 'data/campbell_diagram.cmb')
dfp, dff, dfd = read_cmb_all(f_cmb, f_pwr=f_pwr)
cmb = PlotCampbell(dfp['V'].values, dff, dfd)
fig, axes = subplots(nrows=2, ncols=1, figsize=(8,10))
ax = axes[0,0]
ax = cmb.plot_freq(ax, col='k', mark='^', ls='-', modes='all')
ax = plot_add_ps(ax, dfp['V'], dfp['Speed'], ps=[1,3,6])
ax = axes[1,0]
ax = cmb.plot_damp(ax, col='k', mark='^', ls='-', modes=10)
if __name__ == "__main__": if __name__ == "__main__":
unittest.main() unittest.main()
...@@ -3,13 +3,6 @@ Created on 05/11/2015 ...@@ -3,13 +3,6 @@ Created on 05/11/2015
@author: MMPE @author: MMPE
''' '''
from __future__ import unicode_literals
from __future__ import print_function
from __future__ import division
from __future__ import absolute_import
from future import standard_library
standard_library.install_aliases()
import unittest import unittest
import io import io
import os import os
...@@ -198,7 +191,7 @@ class TestsLoadResults(unittest.TestCase): ...@@ -198,7 +191,7 @@ class TestsLoadResults(unittest.TestCase):
# --------------------------------------------------------------------- # ---------------------------------------------------------------------
res = windIO.LoadResults(self.respath, self.f1_chant, readdata=False) res = windIO.LoadResults(self.respath, self.f1_chant, readdata=False)
self.assertFalse(hasattr(res, 'sig')) self.assertFalse(hasattr(res, 'sig'))
np.testing.assert_array_equal(res.ch_df.index.values, np.arange(0,422)) np.testing.assert_array_equal(res.ch_df.index.values, np.arange(0,432))
self.assertEqual(res.ch_df.unique_ch_name.values[0], 'Time') self.assertEqual(res.ch_df.unique_ch_name.values[0], 'Time')
df = res.ch_df df = res.ch_df
self.assertEqual(2, len(df[df['bearing_name']=='shaft_rot'])) self.assertEqual(2, len(df[df['bearing_name']=='shaft_rot']))
...@@ -208,8 +201,13 @@ class TestsLoadResults(unittest.TestCase): ...@@ -208,8 +201,13 @@ class TestsLoadResults(unittest.TestCase):
exp = [[38, 'global-blade2-elem-019-zrel-1.00-State pos-z', 'm'], exp = [[38, 'global-blade2-elem-019-zrel-1.00-State pos-z', 'm'],
[200, 'blade2-blade2-node-017-momentvec-z', 'kNm'], [200, 'blade2-blade2-node-017-momentvec-z', 'kNm'],
[296, 'blade1-blade1-node-008-forcevec-z', 'kN'], [296, 'blade1-blade1-node-008-forcevec-z', 'kN'],
[415, 'Cl-1-54.82', 'deg'], [415, 'Cl-1-55.7', 'deg'],
[421, 'qwerty-is-azerty', 'is'] [421, 'qwerty', 'is'],
[422, 'wind_wake-wake_pos_x_1', 'm'],
[423, 'wind_wake-wake_pos_y_2', 'm'],
[424, 'wind_wake-wake_pos_z_5', 'm'],
[425, 'statevec_new-blade1-c2def-blade1-absolute-014.00-Dx', 'm'],
[429, 'statevec_new-blade1-c2def-blade1-elastic-014.00-Ry', 'deg'],
] ]
for k in exp: for k in exp:
self.assertEqual(df.loc[k[0], 'unique_ch_name'], k[1]) self.assertEqual(df.loc[k[0], 'unique_ch_name'], k[1])
...@@ -217,12 +215,17 @@ class TestsLoadResults(unittest.TestCase): ...@@ -217,12 +215,17 @@ class TestsLoadResults(unittest.TestCase):
self.assertEqual(res.ch_dict[k[1]]['chi'], k[0]) self.assertEqual(res.ch_dict[k[1]]['chi'], k[0])
self.assertEqual(res.ch_dict[k[1]]['units'], k[2]) self.assertEqual(res.ch_dict[k[1]]['units'], k[2])
# also check we have the tag from a very long description because
# we truncate after 150 characters
self.assertEqual(df.loc[426, 'sensortag'], 'this is a tag')
# --------------------------------------------------------------------- # ---------------------------------------------------------------------
res = windIO.LoadResults(self.respath, self.f2_chant, readdata=False) res = windIO.LoadResults(self.respath, self.f2_chant, readdata=False)
self.assertFalse(hasattr(res, 'sig')) self.assertFalse(hasattr(res, 'sig'))
np.testing.assert_array_equal(res.ch_df.index.values, np.arange(0,217)) np.testing.assert_array_equal(res.ch_df.index.values, np.arange(0,217))
df = res.ch_df df = res.ch_df
self.assertEqual(4, len(df[df['sensortype']=='wsp-global'])) self.assertEqual(13, len(df[df['sensortype']=='wsp-global']))
self.assertEqual(3, len(df[df['sensortype']=='wsp-blade']))
self.assertEqual(2, len(df[df['sensortype']=='harmonic'])) self.assertEqual(2, len(df[df['sensortype']=='harmonic']))
self.assertEqual(2, len(df[df['blade_nr']==3])) self.assertEqual(2, len(df[df['blade_nr']==3]))
...@@ -236,13 +239,27 @@ class TestsLoadResults(unittest.TestCase): ...@@ -236,13 +239,27 @@ class TestsLoadResults(unittest.TestCase):
self.assertEqual(8, len(df1[df1['sensortype']=='a_grid'])) self.assertEqual(8, len(df1[df1['sensortype']=='a_grid']))
self.assertEqual(84, len(df1[df1['blade_nr']==1])) self.assertEqual(84, len(df1[df1['blade_nr']==1]))
def test_unified_chan_names_extensive2(self):
res = windIO.LoadResults(self.respath, self.f3_chant, readdata=False)
df1 = res.ch_df
fname = os.path.join(self.respath, self.f3_chant.replace('.sel', fname = os.path.join(self.respath, self.f3_chant.replace('.sel',
'.ch_df.csv')) '.ch_df.csv'))
# # when changing the tests, update the reference, check, and commit
# # but keep the same column ordering to not make the diffs to big
# cols = ['azimuth', 'bearing_name', 'blade_nr', 'bodyname',
# 'component', 'coord', 'direction', 'dll', 'flap_nr', 'io',
# 'io_nr', 'output_type', 'pos', 'radius','radius_actual', 'sensortag',
# 'sensortype', 'unique_ch_name', 'units', ]
# df1[cols].to_csv(fname)
# df1.to_excel(fname.replace('.csv', '.xlsx'))
# FIXME: read_csv for older pandas versions fails on reading the # FIXME: read_csv for older pandas versions fails on reading the
# mixed str/tuple column. Ignore the pos column for now # mixed str/tuple column. Ignore the pos column for now
colref = ['azimuth', 'bearing_name', 'blade_nr', 'bodyname', colref = ['azimuth', 'bearing_name', 'blade_nr', 'bodyname',
'component', 'coord', 'direction', 'dll', 'flap_nr', 'io', 'component', 'coord', 'direction', 'dll', 'flap_nr', 'io',
'io_nr', 'output_type', 'radius', 'sensortag', 'io_nr', 'output_type', 'radius', 'radius_actual', 'sensortag',
'sensortype', 'unique_ch_name', 'units'] # 'pos', 'sensortype', 'unique_ch_name', 'units'] # 'pos',
# keep_default_na: leave empty strings as empty strings and not nan's # keep_default_na: leave empty strings as empty strings and not nan's
# you can't have nice things: usecols in combination with index_col # you can't have nice things: usecols in combination with index_col
...@@ -253,7 +270,8 @@ class TestsLoadResults(unittest.TestCase): ...@@ -253,7 +270,8 @@ class TestsLoadResults(unittest.TestCase):
# for the comparison we need to have the columns with empty/number # for the comparison we need to have the columns with empty/number
# mixed data types in a consistent data type # mixed data types in a consistent data type
for col in ['azimuth', 'radius', 'blade_nr', 'io_nr', 'flap_nr', 'dll']: for col in ['azimuth', 'radius', 'blade_nr', 'io_nr', 'flap_nr',
'dll', 'radius_actual']:
df1.loc[df1[col]=='', col] = np.nan df1.loc[df1[col]=='', col] = np.nan
df1[col] = df1[col].astype(np.float32) df1[col] = df1[col].astype(np.float32)
df2.loc[df2[col]=='', col] = np.nan df2.loc[df2[col]=='', col] = np.nan
...@@ -261,7 +279,7 @@ class TestsLoadResults(unittest.TestCase): ...@@ -261,7 +279,7 @@ class TestsLoadResults(unittest.TestCase):
# print(df1.pos[14], df2.pos[14]) # print(df1.pos[14], df2.pos[14])
# the pos columns contains also tuples, read from csv doesn't get that # the pos columns contains also tuples, read from csv doesn't get that
# df1['pos'] = df1['pos'].astype(np.str) # df1['pos'] = df1['pos'].astype(str)
# df1['pos'] = df1['pos'].str.replace("'", "") # df1['pos'] = df1['pos'].str.replace("'", "")
# print(df1.pos[14], df2.pos[14]) # print(df1.pos[14], df2.pos[14])
...@@ -272,7 +290,20 @@ class TestsLoadResults(unittest.TestCase): ...@@ -272,7 +290,20 @@ class TestsLoadResults(unittest.TestCase):
# pd.testing.assert_frame_equal(df1, df2) # pd.testing.assert_frame_equal(df1, df2)
# ...but there is no testing.assert_frame_equal in pandas 0.14 # ...but there is no testing.assert_frame_equal in pandas 0.14
for col in colref: for col in colref:
np.testing.assert_array_equal(df1[col].values, df2[col].values) # ignore nan values for float cols
if df1[col].dtype == np.dtype('float32'):
sel = ~np.isnan(df1[col].values)
np.testing.assert_array_equal(df1[col].values[sel],
df2[col].values[sel])
else:
np.testing.assert_array_equal(df1[col].values,
df2[col].values)
    def test_df_stats(self):
        """Smoke test: build the statistics dataframe from an ASCII result.

        Only verifies that statsdel_df() runs without raising; the content
        of the returned dataframe is not checked.
        """
        res = windIO.LoadResults(self.respath, self.fascii)
        df_stats = res.statsdel_df()
class TestUserWind(unittest.TestCase): class TestUserWind(unittest.TestCase):
......
...@@ -4,35 +4,23 @@ Created on Thu Apr 3 19:53:59 2014 ...@@ -4,35 +4,23 @@ Created on Thu Apr 3 19:53:59 2014
@author: dave @author: dave
""" """
from __future__ import print_function
from __future__ import division
from __future__ import unicode_literals
from __future__ import absolute_import
from builtins import dict
from io import open as opent
from builtins import range
from builtins import str
from builtins import int
from future import standard_library
standard_library.install_aliases()
from builtins import object
__author__ = 'David Verelst' __author__ = 'David Verelst'
__license__ = 'GPL' __license__ = 'GPL'
__version__ = '0.5' __version__ = '0.5'
import os import os
import copy
import struct
import math import math
from time import time from time import time
import codecs
from itertools import chain from itertools import chain
import re as re
import numpy as np import numpy as np
import scipy as sp import scipy as sp
import scipy.integrate as integrate import scipy.io as sio
# scipy changed interface name from 1.14 to 1.15
try:
from scipy.integrate import trapezoid # scipy >1.15
except ImportError:
from scipy.integrate import trapz as trapezoid # scipy <=1.14
import pandas as pd import pandas as pd
# misc is part of prepost, which is available on the dtu wind gitlab server: # misc is part of prepost, which is available on the dtu wind gitlab server:
...@@ -40,6 +28,7 @@ import pandas as pd ...@@ -40,6 +28,7 @@ import pandas as pd
from wetb.prepost import misc from wetb.prepost import misc
# wind energy python toolbox, available on the dtu wind redmine server: # wind energy python toolbox, available on the dtu wind redmine server:
# http://vind-redmine.win.dtu.dk/projects/pythontoolbox/repository/show/fatigue_tools # http://vind-redmine.win.dtu.dk/projects/pythontoolbox/repository/show/fatigue_tools
from wetb.hawc2.sensor_names import unified_channel_names
from wetb.hawc2.Hawc2io import ReadHawc2 from wetb.hawc2.Hawc2io import ReadHawc2
from wetb.fatigue_tools.fatigue import (eq_load, cycle_matrix2) from wetb.fatigue_tools.fatigue import (eq_load, cycle_matrix2)
...@@ -134,6 +123,12 @@ class LogFile(object): ...@@ -134,6 +123,12 @@ class LogFile(object):
# *** ERROR *** Out of limits in user defined shear field - limit value used # *** ERROR *** Out of limits in user defined shear field - limit value used
self.err_sim[' *** ERROR *** Out of limit'] = len(self.err_sim) self.err_sim[' *** ERROR *** Out of limit'] = len(self.err_sim)
# NEAR WAKE ERRORS
# ERROR in Near Wake! The radius of the tip is smaller (or equal) to
self.err_sim[' ERROR in Near Wake! The ra'] = len(self.err_sim)
# ERROR: Maximum number of near wake iterations reached
self.err_sim[' ERROR: Maximum number of n'] = len(self.err_sim)
# TODO: error message from a non existing channel output/input # TODO: error message from a non existing channel output/input
# add more messages if required... # add more messages if required...
...@@ -144,9 +139,8 @@ class LogFile(object): ...@@ -144,9 +139,8 @@ class LogFile(object):
def readlog(self, fname, case=None, save_iter=False): def readlog(self, fname, case=None, save_iter=False):
""" """
""" """
# open the current log file # be cautious and try a few encodings when reading the file
with open(fname, 'r') as f: lines = misc.readlines_try_encodings(fname)
lines = f.readlines()
# keep track of the messages already found in this file # keep track of the messages already found in this file
tempLog = [] tempLog = []
...@@ -253,7 +247,8 @@ class LogFile(object): ...@@ -253,7 +247,8 @@ class LogFile(object):
iterations[time_step-1,2] = 1 iterations[time_step-1,2] = 1
# method of last resort, we have no idea what message # method of last resort, we have no idea what message
elif line[:10] == ' *** ERROR' or line[:10]==' ** WARNING': elif line[:10] == ' *** ERROR' or line[:10]==' ** WARNING' \
or line[:6] == ' ERROR':
icol = subcols_sim*self.sim_cols icol = subcols_sim*self.sim_cols
icol += subcols_init*self.init_cols + 1 icol += subcols_init*self.init_cols + 1
# line number of the message # line number of the message
...@@ -442,7 +437,7 @@ class LogFile(object): ...@@ -442,7 +437,7 @@ class LogFile(object):
gr = ('first_tstep_%i', 'last_step_%i', 'nr_%i', 'msg_%i') gr = ('first_tstep_%i', 'last_step_%i', 'nr_%i', 'msg_%i')
colnames.extend(list(chain_iter( (k % i for k in gr) colnames.extend(list(chain_iter( (k % i for k in gr)
for i in range(100,105,1))) ) for i in range(100,100+nr_sim,1))) )
colnames.extend(['nr_extra', 'msg_extra']) colnames.extend(['nr_extra', 'msg_extra'])
colnames.extend(['elapsted_time', colnames.extend(['elapsted_time',
'last_time_step', 'last_time_step',
...@@ -484,8 +479,7 @@ class LoadResults(ReadHawc2): ...@@ -484,8 +479,7 @@ class LoadResults(ReadHawc2):
Usage: Usage:
obj = LoadResults(file_path, file_name) obj = LoadResults(file_path, file_name)
This class is called like a function: This is a subclass of wetb.hawc2.Windio:ReadHawc2.
HawcResultData() will read the specified file upon object initialization.
Available output: Available output:
obj.sig[timeStep,channel] : complete result file in a numpy array obj.sig[timeStep,channel] : complete result file in a numpy array
...@@ -536,11 +530,6 @@ class LoadResults(ReadHawc2): ...@@ -536,11 +530,6 @@ class LoadResults(ReadHawc2):
ch_dict[tag]['units'] ch_dict[tag]['units']
""" """
# ch_df columns, these are created by LoadResults._unified_channel_names
cols = set(['bearing_name', 'sensortag', 'bodyname', 'chi', 'component',
'pos', 'coord', 'sensortype', 'radius', 'blade_nr', 'units',
'output_type', 'io_nr', 'io', 'dll', 'azimuth', 'flap_nr',
'direction'])
# start with reading the .sel file, containing the info regarding # start with reading the .sel file, containing the info regarding
# how to read the binary file and the channel information # how to read the binary file and the channel information
...@@ -577,7 +566,7 @@ class LoadResults(ReadHawc2): ...@@ -577,7 +566,7 @@ class LoadResults(ReadHawc2):
if not (not readdata and (self.FileType == 'GTSDF')): if not (not readdata and (self.FileType == 'GTSDF')):
self.N = int(self.NrSc) self.N = int(self.NrSc)
self.Nch = int(self.NrCh) self.Nch = int(self.NrCh)
self.ch_details = np.ndarray(shape=(self.Nch, 3), dtype='<U100') self.ch_details = np.ndarray(shape=(self.Nch, 3), dtype='<U150')
for ic in range(self.Nch): for ic in range(self.Nch):
self.ch_details[ic, 0] = self.ChInfo[0][ic] self.ch_details[ic, 0] = self.ChInfo[0][ic]
self.ch_details[ic, 1] = self.ChInfo[1][ic] self.ch_details[ic, 1] = self.ChInfo[1][ic]
...@@ -589,743 +578,20 @@ class LoadResults(ReadHawc2): ...@@ -589,743 +578,20 @@ class LoadResults(ReadHawc2):
stop = time() - start stop = time() - start
print('time to load HAWC2 file:', stop, 's') print('time to load HAWC2 file:', stop, 's')
# TODO: THIS IS STILL A WIP
def _make_channel_names(self):
"""Give every channel a unique channel name which is (nearly) identical
to the channel names as defined in the htc output section. Instead
of spaces, use colon (;) to seperate the different commands.
THIS IS STILL A WIP
see also issue #11:
https://gitlab.windenergy.dtu.dk/toolbox/WindEnergyToolbox/issues/11
"""
index = {}
names = {'htc_name':[], 'chi':[], 'label':[], 'unit':[], 'index':[],
'name':[], 'description':[]}
constraint_fmts = {'bea1':'constraint;bearing1',
'bea2':'constraint;bearing2',
'bea3':'constraint;bearing3',
'bea4':'constraint;bearing4'}
# mbdy momentvec tower 1 1 global
force_fmts = {'F':'mbdy;forcevec;{body};{nodenr:03i};{coord};{comp}',
'M':'mbdy;momentvec;{body};{nodenr:03i};{coord};{comp}'}
state_fmt = 'mbdy;{state};{typ};{body};{elnr:03i};{zrel:01.02f};{coord}'
wind_coord_map = {'Vx':'1', 'Vy':'2', 'Vz':'3'}
wind_fmt = 'wind;{typ};{coord};{x};{y};{z};{comp}'
for ch in range(self.Nch):
name = self.ch_details[ch, 0]
name_items = misc.remove_items(name.split(' '), '')
description = self.ch_details[ch, 2]
descr_items = misc.remove_items(description.split(' '), '')
unit = self.ch_details[ch, 1]
# default names
htc_name = ' '.join(name_items+descr_items)
label = ''
coord = ''
typ = ''
elnr = ''
nodenr = ''
zrel = ''
state = ''
# CONSTRAINTS: BEARINGS
if name_items[0] in constraint_fmts:
htc_name = constraint_fmts[name_items[0]] + ';'
htc_name += (descr_items[0] + ';')
htc_name += unit
# MBDY FORCES/MOMENTS
elif name_items[0][0] in force_fmts:
comp = name_items[0]
if comp[0] == 'F':
i0 = 1
else:
i0 = 0
label = description.split('coo: ')[1].split(' ')[1]
coord = descr_items[i0+5]
body = descr_items[i0+1][5:]#.replace('Mbdy:', '')
nodenr = int(descr_items[i0+3])
htc_name = force_fmts[comp[0]].format(body=body, coord=coord,
nodenr=nodenr, comp=comp)
# STATE: POS, VEL, ACC, STATE_ROT
elif descr_items[0][:5] == 'State':
if name_items[0] == 'State':
i0 = 1
state = 'state'
else:
i0 = 0
state = 'state_rot'
typ = name_items[i0+0]
comp = name_items[i0+1]
coord = name_items[i0+3]
body = descr_items[3][5:]#.replace('Mbdy:', '')
elnr = int(descr_items[5])
zrel = float(descr_items[6][6:])#.replace('Z-rel:', ''))
if len(descr_items) > 8:
label = ' '.join(descr_items[9:])
htc_name = state_fmt.format(typ=typ, body=body, elnr=elnr,
zrel=zrel, coord=coord,
state=state)
# WINDSPEED
elif description[:9] == 'Free wind':
if descr_items[4] == 'gl.':
coord = '1' # global
else:
coord = '2' # non-rotating rotor coordinates
try:
comp = wind_coord_map[descr_items[3][:-1]]
typ = 'free_wind'
except KeyError:
comp = descr_items[3]
typ = 'free_wind_hor'
tmp = description.split('pos')[1]
x, y, z = tmp.split(',')
# z might hold a label....
z_items = z.split(' ')
if len(z_items) > 1:
label = ' '.join(z_items[1:])
z = z_items[0]
x, y, z = x.strip(), y.strip(), z.strip()
htc_name = wind_fmt.format(typ=typ, coord=coord, x=x, y=y, z=z,
comp=comp)
names['htc_name'].append(htc_name)
names['chi'].append(ch)
# this is the Channel column from the sel file, so the unique index
# which is dependent on the order of the channels
names['index'].append(ch+1)
names['unit'].append(unit)
names['name'].append(name)
names['description'].append(description)
names['label'].append(label)
names['state'].append(state)
names['type'].append(typ)
names['comp'].append(comp)
names['coord'].append(coord)
names['elnr'].append(coord)
names['nodenr'].append(coord)
names['zrel'].append(coord)
index[name] = ch
return names, index
def _unified_channel_names(self): def _unified_channel_names(self):
""" """For backwards compatibiliity: to create an alternative sensor naming
Make certain channels independent from their index. scheme that is consistent and unique so you can always refer to a channel
name instead of its index in an output file.
The unified channel dictionary ch_dict holds consequently named See wetb.hawc2.sensor_names.unified_channel_names instead.
channels as the key, and the all information is stored in the value
as another dictionary.
The ch_dict key/values pairs are structured differently for different
type of channels. Currently supported channels are:
For forcevec, momentvec, state commands: Returns
node numbers start with 0 at the root -------
element numbers start with 1 at the root None.
key:
coord-bodyname-pos-sensortype-component
global-tower-node-002-forcevec-z
local-blade1-node-005-momentvec-z
hub1-blade1-elem-011-zrel-1.00-state pos-z
value:
ch_dict[tag]['coord']
ch_dict[tag]['bodyname']
ch_dict[tag]['pos']
ch_dict[tag]['sensortype']
ch_dict[tag]['component']
ch_dict[tag]['chi']
ch_dict[tag]['sensortag']
ch_dict[tag]['units']
For the DLL's this is:
key:
DLL-dll_name-io-io_nr
DLL-yaw_control-outvec-3
DLL-yaw_control-inpvec-1
value:
ch_dict[tag]['dll_name']
ch_dict[tag]['io']
ch_dict[tag]['io_nr']
ch_dict[tag]['chi']
ch_dict[tag]['sensortag']
ch_dict[tag]['units']
For the bearings this is:
key:
bearing-bearing_name-output_type-units
bearing-shaft_nacelle-angle_speed-rpm
value:
ch_dict[tag]['bearing_name']
ch_dict[tag]['output_type']
ch_dict[tag]['chi']
ch_dict[tag]['units']
For many of the aero sensors:
'Cl', 'Cd', 'Alfa', 'Vrel'
key:
sensortype-blade_nr-pos
Cl-1-0.01
value:
ch_dict[tag]['sensortype']
ch_dict[tag]['blade_nr']
ch_dict[tag]['pos']
ch_dict[tag]['chi']
ch_dict[tag]['units']
""" """
# save them in a dictionary, use the new coherent naming structure
# as the key, and as value again a dict that hols all the different
# classifications: (chi, channel nr), (coord, coord), ...
self.ch_dict = dict()
# some channel ID's are unique, use them
ch_unique = set(['Omega', 'Ae rot. torque', 'Ae rot. power',
'Ae rot. thrust', 'Time', 'Azi 1'])
ch_aero = set(['Cl', 'Cd', 'Cm', 'Alfa', 'Vrel', 'Tors_e', 'Alfa',
'Lift', 'Drag'])
ch_aerogrid = set(['a_grid', 'am_grid', 'CT', 'CQ'])
# also safe as df
# cols = set(['bearing_name', 'sensortag', 'bodyname', 'chi',
# 'component', 'pos', 'coord', 'sensortype', 'radius',
# 'blade_nr', 'units', 'output_type', 'io_nr', 'io', 'dll',
# 'azimuth', 'flap_nr'])
df_dict = {col: [] for col in self.cols}
df_dict['unique_ch_name'] = []
# scan through all channels and see which can be converted
# to sensible unified name
for ch in range(self.Nch):
items = self.ch_details[ch, 2].split(' ')
# remove empty values in the list
items = misc.remove_items(items, '')
dll = False
# be carefull, identify only on the starting characters, because
# the signal tag can hold random text that in some cases might
# trigger a false positive
# -----------------------------------------------------------------
# check for all the unique channel descriptions
if self.ch_details[ch,0].strip() in ch_unique:
tag = self.ch_details[ch, 0].strip()
channelinfo = {}
channelinfo['units'] = self.ch_details[ch, 1]
channelinfo['sensortag'] = self.ch_details[ch, 2]
channelinfo['chi'] = ch
# -----------------------------------------------------------------
# or in the long description:
# 0 1 2 3 4 5 6 and up
# MomentMz Mbdy:blade nodenr: 5 coo: blade TAG TEXT
elif self.ch_details[ch, 2].startswith('MomentM'):
coord = items[5]
bodyname = items[1].replace('Mbdy:', '')
# set nodenr to sortable way, include leading zeros
# node numbers start with 0 at the root
nodenr = '%03i' % int(items[3])
# skip the attached the component
# sensortype = items[0][:-2]
# or give the sensor type the same name as in HAWC2
sensortype = 'momentvec'
component = items[0][-1:len(items[0])]
# the tag only exists if defined
if len(items) > 6:
sensortag = ' '.join(items[6:])
else:
sensortag = ''
# and tag it
pos = 'node-%s' % nodenr
tagitems = (coord, bodyname, pos, sensortype, component)
tag = '%s-%s-%s-%s-%s' % tagitems
# save all info in the dict
channelinfo = {}
channelinfo['coord'] = coord
channelinfo['bodyname'] = bodyname
channelinfo['pos'] = pos
channelinfo['sensortype'] = sensortype
channelinfo['component'] = component
channelinfo['chi'] = ch
channelinfo['sensortag'] = sensortag
channelinfo['units'] = self.ch_details[ch, 1]
# -----------------------------------------------------------------
# 0 1 2 3 4 5 6 7 and up
# Force Fx Mbdy:blade nodenr: 2 coo: blade TAG TEXT
elif self.ch_details[ch, 2].startswith('Force'):
coord = items[6]
bodyname = items[2].replace('Mbdy:', '')
nodenr = '%03i' % int(items[4])
# skipe the attached the component
# sensortype = items[0]
# or give the sensor type the same name as in HAWC2
sensortype = 'forcevec'
component = items[1][1]
if len(items) > 7:
sensortag = ' '.join(items[7:])
else:
sensortag = ''
# and tag it
pos = 'node-%s' % nodenr
tagitems = (coord, bodyname, pos, sensortype, component)
tag = '%s-%s-%s-%s-%s' % tagitems
# save all info in the dict
channelinfo = {}
channelinfo['coord'] = coord
channelinfo['bodyname'] = bodyname
channelinfo['pos'] = pos
channelinfo['sensortype'] = sensortype
channelinfo['component'] = component
channelinfo['chi'] = ch
channelinfo['sensortag'] = sensortag
channelinfo['units'] = self.ch_details[ch, 1]
# -----------------------------------------------------------------
# ELEMENT STATES: pos, vel, acc, rot, ang
# 0 1 2 3 4 5 6 7 8
# State pos x Mbdy:blade E-nr: 1 Z-rel:0.00 coo: blade
# 0 1 2 3 4 5 6 7 8 9+
# State_rot proj_ang tx Mbdy:bname E-nr: 1 Z-rel:0.00 coo: cname label
# State_rot omegadot tz Mbdy:bname E-nr: 1 Z-rel:1.00 coo: cname label
elif self.ch_details[ch,2].startswith('State'):
# or self.ch_details[ch,0].startswith('euler') \
# or self.ch_details[ch,0].startswith('ax') \
# or self.ch_details[ch,0].startswith('omega') \
# or self.ch_details[ch,0].startswith('proj'):
coord = items[8]
bodyname = items[3].replace('Mbdy:', '')
# element numbers start with 1 at the root
elementnr = '%03i' % int(items[5])
zrel = '%04.2f' % float(items[6].replace('Z-rel:', ''))
# skip the attached the component
#sensortype = ''.join(items[0:2])
# or give the sensor type the same name as in HAWC2
tmp = self.ch_details[ch, 0].split(' ')
sensortype = tmp[0]
if sensortype.startswith('State'):
sensortype += ' ' + tmp[1]
component = items[2]
if len(items) > 8:
sensortag = ' '.join(items[9:])
else:
sensortag = ''
# and tag it
pos = 'elem-%s-zrel-%s' % (elementnr, zrel)
tagitems = (coord, bodyname, pos, sensortype, component)
tag = '%s-%s-%s-%s-%s' % tagitems
# save all info in the dict
channelinfo = {}
channelinfo['coord'] = coord
channelinfo['bodyname'] = bodyname
channelinfo['pos'] = pos
channelinfo['sensortype'] = sensortype
channelinfo['component'] = component
channelinfo['chi'] = ch
channelinfo['sensortag'] = sensortag
channelinfo['units'] = self.ch_details[ch, 1]
# -----------------------------------------------------------------
# DLL CONTROL I/O
# there are two scenario's on how the channel description is formed
# the channel id is always the same though
# id for all three cases:
# DLL out 1: 3
# DLL inp 2: 3
# description case 1 ("dll type2_dll b2h2 inpvec 30" in htc output)
# 0 1 2 3 4+
# yaw_control outvec 3 yaw_c input reference angle
# description case 2 ("dll inpvec 2 1" in htc output):
# 0 1 2 3 4 5 6+
# DLL : 2 inpvec : 4 mgen hss
# description case 3
# 0 1 2 4
# hawc_dll :echo outvec : 1
elif self.ch_details[ch, 0].startswith('DLL'):
# case 3
if items[1][0] == ':echo':
# hawc_dll named case (case 3) is polluted with colons
items = self.ch_details[ch,2].replace(':', '')
items = items.split(' ')
items = misc.remove_items(items, '')
dll = items[1]
io = items[2]
io_nr = items[3]
tag = 'DLL-%s-%s-%s' % (dll, io, io_nr)
sensortag = ''
# case 2: no reference to dll name
elif self.ch_details[ch,2].startswith('DLL'):
dll = items[2]
io = items[3]
io_nr = items[5]
sensortag = ' '.join(items[6:])
# and tag it
tag = 'DLL-%s-%s-%s' % (dll,io,io_nr)
# case 1: type2 dll name is given
else:
dll = items[0]
io = items[1]
io_nr = items[2]
sensortag = ' '.join(items[3:])
tag = 'DLL-%s-%s-%s' % (dll, io, io_nr)
# save all info in the dict
channelinfo = {}
channelinfo['dll'] = dll
channelinfo['io'] = io
channelinfo['io_nr'] = io_nr
channelinfo['chi'] = ch
channelinfo['sensortag'] = sensortag
channelinfo['units'] = self.ch_details[ch, 1]
channelinfo['sensortype'] = 'dll-io'
# -----------------------------------------------------------------
# BEARING OUTPUS
# bea1 angle_speed rpm shaft_nacelle angle speed
elif self.ch_details[ch, 0].startswith('bea'):
output_type = self.ch_details[ch, 0].split(' ')[1]
bearing_name = items[0]
units = self.ch_details[ch, 1]
# there is no label option for the bearing output
# and tag it
tag = 'bearing-%s-%s-%s' % (bearing_name, output_type, units)
# save all info in the dict
channelinfo = {}
channelinfo['bearing_name'] = bearing_name
channelinfo['output_type'] = output_type
channelinfo['units'] = units
channelinfo['chi'] = ch
# -----------------------------------------------------------------
# AS DEFINED IN: ch_aero
# AERO CL, CD, CM, VREL, ALFA, LIFT, DRAG, etc
# Cl, R= 0.5 deg Cl of blade 1 at radius 0.49
# Azi 1 deg Azimuth of blade 1
# NOTE THAT RADIUS FROM ch_details[ch, 0] REFERS TO THE RADIUS
# YOU ASKED FOR, AND ch_details[ch, 2] IS WHAT YOU GET, which is
# still based on a mean radius (deflections change the game)
elif self.ch_details[ch, 0].split(',')[0] in ch_aero:
dscr_list = self.ch_details[ch, 2].split(' ')
dscr_list = misc.remove_items(dscr_list, '')
sensortype = self.ch_details[ch, 0].split(',')[0]
# Blade number is identified as the first integer in the string
blade_nr = re.search(r'\d+', self.ch_details[ch, 2]).group()
blade_nr = int(blade_nr)
# sometimes the units for aero sensors are wrong!
units = self.ch_details[ch, 1]
# there is no label option
# radius what you get
# radius = dscr_list[-1]
# radius what you asked for, identified as the last float in the string
s = self.ch_details[ch, 2]
radius = float(re.findall(r"[-+]?\d*\.\d+|\d+", s)[-1])
# and tag it
tag = '%s-%s-%s' % (sensortype, blade_nr, radius)
# save all info in the dict
channelinfo = {}
channelinfo['sensortype'] = sensortype
channelinfo['radius'] = float(radius)
channelinfo['blade_nr'] = blade_nr
channelinfo['units'] = units
channelinfo['chi'] = ch
# -----------------------------------------------------------------
# for the induction grid over the rotor
# a_grid, azi 0.00 r 1.74
elif self.ch_details[ch, 0].split(',')[0] in ch_aerogrid:
items = self.ch_details[ch, 0].split(',')
sensortype = items[0]
items2 = items[1].split(' ')
items2 = misc.remove_items(items2, '')
azi = items2[1]
# radius what you asked for
radius = items2[3]
units = self.ch_details[ch, 1]
# and tag it
tag = '%s-azi-%s-r-%s' % (sensortype,azi,radius)
# save all info in the dict
channelinfo = {}
channelinfo['sensortype'] = sensortype
channelinfo['radius'] = float(radius)
channelinfo['azimuth'] = float(azi)
channelinfo['units'] = units
channelinfo['chi'] = ch
# -----------------------------------------------------------------
# INDUCTION AT THE BLADE
# 0: Induc. Vz, rpco, R= 1.4
# 1: m/s
# 2: Induced wsp Vz of blade 1 at radius 1.37, RP. coo.
# Induc. Vx, locco, R= 1.4
# Induced wsp Vx of blade 1 at radius 1.37, local ae coo.
# Induc. Vy, blco, R= 1.4
# Induced wsp Vy of blade 1 at radius 1.37, local bl coo.
# Induc. Vz, glco, R= 1.4
# Induced wsp Vz of blade 1 at radius 1.37, global coo.
# Induc. Vx, rpco, R= 8.4
# Induced wsp Vx of blade 1 at radius 8.43, RP. coo.
elif self.ch_details[ch, 0].strip()[:5] == 'Induc':
items = self.ch_details[ch, 2].split(' ')
items = misc.remove_items(items, '')
coord = self.ch_details[ch, 2].split(', ')[1].strip()
blade_nr = int(items[5])
# radius what you get
# radius = float(items[8].replace(',', ''))
# radius what you asked for, identified as the last float in the string
s = self.ch_details[ch, 2]
radius = float(re.findall(r"[-+]?\d*\.\d+|\d+", s)[-1])
items = self.ch_details[ch, 0].split(',')
component = items[0][-2:]
units = self.ch_details[ch, 1]
# and tag it
rpl = (coord, blade_nr, component, radius)
tag = 'induc-%s-blade-%1i-%s-r-%03.01f' % rpl
# save all info in the dict
channelinfo = {}
channelinfo['blade_nr'] = blade_nr
channelinfo['sensortype'] = 'induction'
channelinfo['radius'] = radius
channelinfo['coord'] = coord
channelinfo['component'] = component
channelinfo['units'] = units
channelinfo['chi'] = ch
# -----------------------------------------------------------------
# MORE AERO SENSORS
# Ae intfrc Fx, rpco, R= 0.0
# Aero int. force Fx of blade 1 at radius 0.00, RP coo.
# Ae secfrc Fy, R= 25.0
# Aero force Fy of blade 1 at radius 24.11
# Ae pos x, glco, R= 88.2
# Aero position x of blade 1 at radius 88.17, global coo.
elif self.ch_details[ch, 0].strip()[:2] == 'Ae':
units = self.ch_details[ch, 1]
items = self.ch_details[ch, 2].split(' ')
items = misc.remove_items(items, '')
# Blade number is identified as the first integer in the string
blade_nr = re.search(r'\d+', self.ch_details[ch, 2]).group()
blade_nr = int(blade_nr)
# radius what you get
tmp = self.ch_details[ch, 2].split('radius ')[1].strip()
tmp = tmp.split(',')
# radius = float(tmp[0])
# radius what you asked for, identified as the last float in the string
s = self.ch_details[ch, 2]
radius = float(re.findall(r"[-+]?\d*\.\d+|\d+", s)[-1])
if len(tmp) > 1:
coord = tmp[1].strip()
else:
coord = 'aero'
items = self.ch_details[ch, 0].split(' ')
sensortype = items[1]
component = items[2].replace(',', '')
# save all info in the dict
channelinfo = {}
channelinfo['blade_nr'] = blade_nr
channelinfo['sensortype'] = sensortype
channelinfo['radius'] = radius
channelinfo['coord'] = coord
channelinfo['component'] = component
channelinfo['units'] = units
channelinfo['chi'] = ch
rpl = (coord, blade_nr, sensortype, component, radius)
tag = 'aero-%s-blade-%1i-%s-%s-r-%03.01f' % rpl
# TODO: wind speed
# some spaces have been trimmed here
# WSP gl. coo.,Vy m/s
# // Free wind speed Vy, gl. coo, of gl. pos 0.00, 0.00, -2.31
# WSP gl. coo.,Vdir_hor deg
# Free wind speed Vdir_hor, gl. coo, of gl. pos 0.00, 0.00, -2.31
# -----------------------------------------------------------------
# WATER SURFACE gl. coo, at gl. coo, x,y= 0.00, 0.00
elif self.ch_details[ch, 2].startswith('Water'):
units = self.ch_details[ch, 1]
# but remove the comma
x = items[-2][:-1]
y = items[-1]
# and tag it
tag = 'watersurface-global-%s-%s' % (x, y)
# save all info in the dict
channelinfo = {}
channelinfo['coord'] = 'global'
channelinfo['pos'] = (float(x), float(y))
channelinfo['units'] = units
channelinfo['chi'] = ch
# -----------------------------------------------------------------
# WIND SPEED
# WSP gl. coo.,Vx
# Free wind speed Vx, gl. coo, of gl. pos 0.00, 0.00, -6.00 LABEL
elif self.ch_details[ch, 0].startswith('WSP gl.'):
units = self.ch_details[ch, 1]
direction = self.ch_details[ch, 0].split(',')[1]
tmp = self.ch_details[ch, 2].split('pos')[1]
x, y, z = tmp.split(',')
x, y, z = x.strip(), y.strip(), z.strip()
tmp = z.split(' ')
sensortag = ''
if len(tmp) == 2:
z, sensortag = tmp
elif len(tmp) == 1:
z = tmp[0]
# and tag it
tag = 'windspeed-global-%s-%s-%s-%s' % (direction, x, y, z)
# save all info in the dict
channelinfo = {}
channelinfo['coord'] = 'global'
channelinfo['pos'] = (x, y, z)
channelinfo['units'] = units
channelinfo['chi'] = ch
channelinfo['sensortag'] = sensortag
# FIXME: direction is the same as component, right?
channelinfo['direction'] = direction
channelinfo['sensortype'] = 'wsp-global'
# WIND SPEED AT BLADE
# 0: WSP Vx, glco, R= 61.5
# 2: Wind speed Vx of blade 1 at radius 61.52, global coo.
elif self.ch_details[ch, 0].startswith('WSP V'):
units = self.ch_details[ch, 1].strip()
tmp = self.ch_details[ch, 0].split(' ')[1].strip()
direction = tmp.replace(',', '')
coord = self.ch_details[ch, 2].split(',')[1].strip()
# Blade number is identified as the first integer in the string
blade_nr = re.search(r'\d+', self.ch_details[ch, 2]).group()
blade_nr = int(blade_nr)
# radius what you get
# radius = self.ch_details[ch, 2].split('radius')[1].split(',')[0]
# radius = radius.strip()
# radius what you asked for, identified as the last float in the string
s=self.ch_details[ch, 2]
radius=float(re.findall(r"[-+]?\d*\.\d+|\d+", s)[-1])
# and tag it
rpl = (direction, blade_nr, radius, coord)
tag = 'wsp-blade-%s-%s-%s-%s' % rpl
# save all info in the dict
channelinfo = {}
channelinfo['coord'] = coord
# FIXME: direction is the same as component, right?
channelinfo['direction'] = direction
channelinfo['blade_nr'] = blade_nr
channelinfo['radius'] = float(radius)
channelinfo['units'] = units
channelinfo['chi'] = ch
channelinfo['sensortype'] = 'wsp-blade'
# FLAP ANGLE
# 2: Flap angle for blade 3 flap number 1
elif self.ch_details[ch, 0][:7] == 'setbeta':
units = self.ch_details[ch, 1].strip()
# Blade number is identified as the first integer in the string
blade_nr = re.search(r'\d+', self.ch_details[ch, 2]).group()
blade_nr = int(blade_nr)
flap_nr = self.ch_details[ch, 2].split(' ')[-1].strip()
# and tag it
tag = 'setbeta-bladenr-%s-flapnr-%s' % (blade_nr, flap_nr)
# save all info in the dict
channelinfo = {}
channelinfo['flap_nr'] = int(flap_nr)
channelinfo['blade_nr'] = blade_nr
channelinfo['units'] = units
channelinfo['chi'] = ch
# harmonic channel output
# Harmonic
# Harmonic sinus function
elif self.ch_details[ch, 0][:7] == 'Harmoni':
func_name = ' '.join(self.ch_details[ch, 1].split(' ')[1:])
channelinfo = {}
channelinfo['output_type'] = func_name
channelinfo['sensortype'] = 'harmonic'
channelinfo['chi'] = ch
base = self.ch_details[ch,2].strip().lower().replace(' ', '_')
tag = base + '_0'
if tag in self.ch_dict:
tag_nr = int(tag.split('_')[-1]) + 1
tag = base + '_%i' % tag_nr
elif self.ch_details[ch, 0][:6] == 'a_norm':
channelinfo = {}
channelinfo['chi'] = ch
channelinfo['units'] = self.ch_details[ch, 1].strip()
channelinfo['sensortype'] = 'aero'
tag = 'aero-induc_a_norm'
# -----------------------------------------------------------------
# If all this fails, just combine channel name and description
else:
tag = '-'.join(self.ch_details[ch,:3].tolist())
channelinfo = {}
channelinfo['chi'] = ch
channelinfo['units'] = self.ch_details[ch, 1].strip()
# -----------------------------------------------------------------
# add a v_XXX tag in case the channel already exists
if tag in self.ch_dict:
jj = 1
while True:
tag_new = tag + '_v%i' % jj
if tag_new in self.ch_dict:
jj += 1
else:
tag = tag_new
break
self.ch_dict[tag] = copy.copy(channelinfo)
# -----------------------------------------------------------------
# save in for DataFrame format
cols_ch = set(channelinfo.keys())
for col in cols_ch:
df_dict[col].append(channelinfo[col])
# the remainder columns we have not had yet. Fill in blank
for col in (self.cols - cols_ch):
df_dict[col].append('')
df_dict['unique_ch_name'].append(tag)
self.ch_df = pd.DataFrame(df_dict)
self.ch_df.set_index('chi', inplace=True)
self.ch_dict, self.ch_df = unified_channel_names(self.ChInfo)
def _ch_dict2df(self): def _ch_dict2df(self):
""" """
...@@ -1434,7 +700,7 @@ class LoadResults(ReadHawc2): ...@@ -1434,7 +700,7 @@ class LoadResults(ReadHawc2):
stats['range'] = stats['max'] - stats['min'] stats['range'] = stats['max'] - stats['min']
stats['absmax'] = np.absolute(sig[i0:i1, :]).max(axis=0) stats['absmax'] = np.absolute(sig[i0:i1, :]).max(axis=0)
stats['rms'] = np.sqrt(np.mean(sig[i0:i1, :]*sig[i0:i1, :], axis=0)) stats['rms'] = np.sqrt(np.mean(sig[i0:i1, :]*sig[i0:i1, :], axis=0))
stats['int'] = integrate.trapz(sig[i0:i1, :], x=sig[i0:i1, 0], axis=0) stats['int'] = trapezoid(sig[i0:i1, :], x=sig[i0:i1, 0], axis=0)
return stats return stats
def statsdel_df(self, i0=0, i1=None, statchans='all', delchans='all', def statsdel_df(self, i0=0, i1=None, statchans='all', delchans='all',
...@@ -1501,8 +767,8 @@ class LoadResults(ReadHawc2): ...@@ -1501,8 +767,8 @@ class LoadResults(ReadHawc2):
statsdel['range'] = statsdel['max'] - statsdel['min'] statsdel['range'] = statsdel['max'] - statsdel['min']
statsdel['absmax'] = np.abs(datasel).max(axis=0) statsdel['absmax'] = np.abs(datasel).max(axis=0)
statsdel['rms'] = np.sqrt(np.mean(datasel*datasel, axis=0)) statsdel['rms'] = np.sqrt(np.mean(datasel*datasel, axis=0))
statsdel['int'] = integrate.trapz(datasel, x=time, axis=0) statsdel['int'] = trapezoid(datasel, x=time, axis=0)
statsdel['intabs'] = integrate.trapz(np.abs(datasel), x=time, axis=0) statsdel['intabs'] = trapezoid(np.abs(datasel), x=time, axis=0)
if neq is None: if neq is None:
neq = self.sig[-1,0] - self.sig[0,0] neq = self.sig[-1,0] - self.sig[0,0]
...@@ -1510,7 +776,7 @@ class LoadResults(ReadHawc2): ...@@ -1510,7 +776,7 @@ class LoadResults(ReadHawc2):
for chi, chan in zip(delchis, delchans): for chi, chan in zip(delchis, delchans):
signal = self.sig[i0:i1,chi] signal = self.sig[i0:i1,chi]
eq = self.calc_fatigue(signal, no_bins=no_bins, neq=neq, m=m) eq = self.calc_fatigue(signal, no_bins=no_bins, neq=neq, m=m)
statsdel.loc[chan][m_cols] = eq statsdel.loc[chan, m_cols] = eq
return statsdel return statsdel
...@@ -1591,6 +857,49 @@ class LoadResults(ReadHawc2): ...@@ -1591,6 +857,49 @@ class LoadResults(ReadHawc2):
return np.array(zvals), np.array(yvals) return np.array(zvals), np.array(yvals)
def add_channel(self, data, name, units, description='', options=None):
    """Add a channel to self.sig and self.ch_df such that self.statsdel_df
    also calculates the statistics for this channel.

    Parameters
    ----------

    data : np.ndarray(n, 1)
        Array containing the new channel. Should be of shape (n, 1). If not
        it will be reshaped to (len(data),1).

    name : str
        Unique name of the new channel

    units : str
        Channel units

    description : str, default=''
        channel description

    options : dict, default=None
        Optional mapping of ch_df column name to value; overrides the
        (empty) defaults for the new meta-data row.
    """
    # start from an all-empty meta-data record covering every ch_df column
    meta = dict.fromkeys(self.ch_df.columns, '')
    if options is not None:
        meta.update(options)
    meta['unique_ch_name'] = name
    # append the meta-data, keeping the same column order as ch_df
    self.ch_df.loc[len(self.ch_df)] = [meta[col] for col in self.ch_df.columns]
    # mirror the new channel in the raw HAWC2 channel description table
    self.ch_details = np.append(self.ch_details,
                                [[name, units, description]], axis=0)
    # append the data as an extra column of the results array
    nr_samples = len(data)
    if data.shape != (nr_samples, 1):
        data = data.reshape(nr_samples, 1)
    self.sig = np.append(self.sig, data, axis=1)
def save_chan_names(self, fname): def save_chan_names(self, fname):
"""Save unique channel names to text file. """Save unique channel names to text file.
""" """
...@@ -1656,6 +965,16 @@ class LoadResults(ReadHawc2): ...@@ -1656,6 +965,16 @@ class LoadResults(ReadHawc2):
self.ch_details self.ch_details
self.ch_dict self.ch_dict
def save_matlab(self, fname):
    """Save the results in MATLAB .mat format.

    Saves two variables: ``sig`` (the full results array) and
    ``description`` (per channel: the 3 HAWC2 description columns plus the
    unified channel name).

    Parameters
    ----------

    fname : str
        File name of the target .mat file.
    """
    # one row per channel, 4 columns of meta-data
    # FIX: the np.object alias was removed in numpy>=1.24, use the
    # builtin object instead
    details = np.zeros((self.sig.shape[1], 4), dtype=object)
    for i in range(self.sig.shape[1]):
        details[i, 0:3] = self.ch_details[i, :]
        details[i, 3] = self.ch_df.loc[i, 'unique_ch_name']
    sio.savemat(fname, {'sig': self.sig, 'description': details})
def ReadOutputAtTime(fname): def ReadOutputAtTime(fname):
"""Distributed blade loading as generated by the HAWC2 output_at_time """Distributed blade loading as generated by the HAWC2 output_at_time
...@@ -1675,7 +994,7 @@ def ReadOutputAtTime(fname): ...@@ -1675,7 +994,7 @@ def ReadOutputAtTime(fname):
# data.index.names = cols # data.index.names = cols
# because the formatting is really weird, we need to sanatize it a bit # because the formatting is really weird, we need to sanatize it a bit
with opent(fname, 'r') as f: with open(fname, 'r') as f:
# read the header from line 3 # read the header from line 3
for k in range(7): for k in range(7):
line = f.readline() line = f.readline()
...@@ -1713,7 +1032,7 @@ def ReadEigenBody(fname, debug=False): ...@@ -1713,7 +1032,7 @@ def ReadEigenBody(fname, debug=False):
# Body data for body number : 3 with the name :nacelle # Body data for body number : 3 with the name :nacelle
# Results: fd [Hz] fn [Hz] log.decr [%] # Results: fd [Hz] fn [Hz] log.decr [%]
# Mode nr: 1: 1.45388E-21 1.74896E-03 6.28319E+02 # Mode nr: 1: 1.45388E-21 1.74896E-03 6.28319E+02
FILE = opent(fname) FILE = open(fname)
lines = FILE.readlines() lines = FILE.readlines()
FILE.close() FILE.close()
...@@ -1760,7 +1079,7 @@ def ReadEigenBody(fname, debug=False): ...@@ -1760,7 +1079,7 @@ def ReadEigenBody(fname, debug=False):
return pd.DataFrame(df_dict) return pd.DataFrame(df_dict)
def ReadEigenStructure(file_path, file_name, debug=False, max_modes=500): def ReadEigenStructure(fname, debug=False):
""" """
Read HAWC2 structure eigenalysis result file Read HAWC2 structure eigenalysis result file
============================================ ============================================
...@@ -1811,7 +1130,7 @@ def ReadEigenStructure(file_path, file_name, debug=False, max_modes=500): ...@@ -1811,7 +1130,7 @@ def ReadEigenStructure(file_path, file_name, debug=False, max_modes=500):
# 8 Mode nr: 1: 3.58673E+00 3.58688E+00 5.81231E+00 # 8 Mode nr: 1: 3.58673E+00 3.58688E+00 5.81231E+00
# Mode nr:294: 0.00000E+00 6.72419E+09 6.28319E+02 # Mode nr:294: 0.00000E+00 6.72419E+09 6.28319E+02
FILE = opent(os.path.join(file_path, file_name)) FILE = open(fname)
lines = FILE.readlines() lines = FILE.readlines()
FILE.close() FILE.close()
...@@ -1820,25 +1139,52 @@ def ReadEigenStructure(file_path, file_name, debug=False, max_modes=500): ...@@ -1820,25 +1139,52 @@ def ReadEigenStructure(file_path, file_name, debug=False, max_modes=500):
# we now the number of modes by having the number of lines # we now the number of modes by having the number of lines
nrofmodes = len(lines) - header_lines nrofmodes = len(lines) - header_lines
modes_arr = np.ndarray((3, nrofmodes)) df = pd.DataFrame(np.ndarray((nrofmodes, 3)), dtype=np.float64,
columns=['Fd_hz', 'Fn_hz', 'log_decr_pct'])
for i, line in enumerate(lines): for i, line in enumerate(lines):
if i > max_modes: # if i > max_modes:
# cut off the unused rest # # cut off the unused rest
modes_arr = modes_arr[:, :i] # df.iloc[:,i] = modes_arr[:, :i]
break # break
# ignore the header # ignore the header
if i < header_lines: if i < header_lines:
continue continue
# split up mode nr from the rest # split up mode nr from the rest, remove line ending
parts = line.split(':') parts = line[:-1].split(':')
# modenr = int(parts[1]) #modenr = int(parts[1])
# get fd, fn and damping, but remove all empty items on the list # get fd, fn and damping, but remove all empty items on the list
modes_arr[:, i-header_lines]=misc.remove_items(parts[2].split(' '), '') # also cut off line
df.iloc[i-header_lines,:]=np.float64(misc.remove_items(parts[2].split(' '), ''))
return df
def ReadStructInertia(fname):
    """Read the body inertia table from a HAWC2 structure output file.

    The table is delimited by horizontal rules of underscores; the column
    names are on line 8 and the body rows start at line 10, running until
    the 5th horizontal rule.

    Parameters
    ----------

    fname : str
        Path to the structure inertia output file.

    Returns
    -------

    bodies : pandas.DataFrame
        One row per body, first column ``body_name``, all other columns
        cast to float.
    """
    with open(fname) as f:
        lines = f.readlines()

    # indices of the horizontal separator rules
    marks = [i for i, line in enumerate(lines)
             if line.startswith('_________')]

    # the first two tokens on the header line are decoration
    header = ['body_name'] + lines[7].split()[2:]
    # body rows: from line 10 up to the 5th separator rule
    data = lines[9:marks[4]]

    bodies = {colname: [] for colname in header}
    for row in data:
        # strip the line ending before splitting into columns
        for colname, value in zip(header, row[:-1].split()):
            bodies[colname].append(value)

    bodies = pd.DataFrame(bodies)
    # all columns except the body name hold numeric values
    for colname in header[1:]:
        bodies[colname] = bodies[colname].astype(float)
    return bodies
class UserWind(object): class UserWind(object):
...@@ -2056,7 +1402,7 @@ class UserWind(object): ...@@ -2056,7 +1402,7 @@ class UserWind(object):
u_comp, v_comp, w_comp, v_coord, w_coord, phi_deg u_comp, v_comp, w_comp, v_coord, w_coord, phi_deg
""" """
# read the header # read the header
with opent(fname) as f: with open(fname) as f:
for i, line in enumerate(f.readlines()): for i, line in enumerate(f.readlines()):
if line.strip()[0] != '#': if line.strip()[0] != '#':
nr_v, nr_w = misc.remove_items(line.split('#')[0].split(), '') nr_v, nr_w = misc.remove_items(line.split('#')[0].split(), '')
...@@ -2186,260 +1532,6 @@ class WindProfiles(object): ...@@ -2186,260 +1532,6 @@ class WindProfiles(object):
return a_phi * t1 * t2 * t3 return a_phi * t1 * t2 * t3
class Turbulence(object):
    """Read, convert and write turbulence boxes in HAWC2 and BLADED formats.
    """

    def __init__(self):
        pass

    def read_hawc2(self, fpath, shape):
        """
        Read the HAWC2 turbulence format: a flat binary file of float32
        values.

        Parameters
        ----------

        fpath : str
            Path to the turbulence box binary file.

        shape : tuple(int, int, int)
            Grid dimensions of the box; the file holds
            shape[0]*shape[1]*shape[2] float32 values.

        Returns
        -------

        turb : np.ndarray(shape)
        """
        # use a context manager so the file handle is always released
        with open(fpath, 'rb') as fid:
            tmp = np.fromfile(fid, 'float32', shape[0]*shape[1]*shape[2])
        turb = np.reshape(tmp, shape)
        return turb

    def read_bladed(self, fpath, basename):
        """Read a BLADED .wnd file: a fixed binary header followed by the
        turbulence grid as int16 values.

        Returns
        -------

        points : np.ndarray
            The raw int16 turbulence grid points (header values are read
            only to advance the file position).
        """
        with open(fpath + basename + '.wnd', 'rb') as fid:
            # the header fields have to be read sequentially, in order
            R1 = struct.unpack('h', fid.read(2))[0]
            R2 = struct.unpack('h', fid.read(2))[0]
            turb = struct.unpack('i', fid.read(4))[0]
            lat = struct.unpack('f', fid.read(4))[0]
            rough = struct.unpack('f', fid.read(4))[0]
            refh = struct.unpack('f', fid.read(4))[0]
            longti = struct.unpack('f', fid.read(4))[0]
            latti = struct.unpack('f', fid.read(4))[0]
            vertti = struct.unpack('f', fid.read(4))[0]
            dv = struct.unpack('f', fid.read(4))[0]
            dw = struct.unpack('f', fid.read(4))[0]
            du = struct.unpack('f', fid.read(4))[0]
            halfalong = struct.unpack('i', fid.read(4))[0]
            mean_ws = struct.unpack('f', fid.read(4))[0]
            VertLongComp = struct.unpack('f', fid.read(4))[0]
            LatLongComp = struct.unpack('f', fid.read(4))[0]
            LongLongComp = struct.unpack('f', fid.read(4))[0]
            Int = struct.unpack('i', fid.read(4))[0]
            seed = struct.unpack('i', fid.read(4))[0]
            VertGpNum = struct.unpack('i', fid.read(4))[0]
            LatGpNum = struct.unpack('i', fid.read(4))[0]
            VertLatComp = struct.unpack('f', fid.read(4))[0]
            LatLatComp = struct.unpack('f', fid.read(4))[0]
            LongLatComp = struct.unpack('f', fid.read(4))[0]
            VertVertComp = struct.unpack('f', fid.read(4))[0]
            LatVertComp = struct.unpack('f', fid.read(4))[0]
            LongVertComp = struct.unpack('f', fid.read(4))[0]

            points = np.fromfile(fid, 'int16',
                                 2*halfalong*VertGpNum*LatGpNum*3)
        return points

    def convert2bladed(self, fpath, basename, shape=(4096, 32, 32)):
        """
        Convert a HAWC2 turbulence box to BLADED format: normalize each
        component with the mean and standard deviation at the box centre,
        scale by 1000 and cast to int16.

        Returns
        -------

        iu, iv, iw : np.ndarray(shape) of int16
        """
        u = self.read_hawc2(fpath + basename + 'u.bin', shape)
        v = self.read_hawc2(fpath + basename + 'v.bin', shape)
        w = self.read_hawc2(fpath + basename + 'w.bin', shape)

        # mean velocity components at the center of the box
        # FIX: use integer division; on Python 3 the former "/" produced
        # float indices and indexing raised an IndexError/TypeError
        v1, v2 = (shape[1]//2)-1, shape[1]//2
        w1, w2 = (shape[2]//2)-1, shape[2]//2
        ucent = (u[:, v1, w1] + u[:, v1, w2] + u[:, v2, w1] + u[:, v2, w2]) / 4.0
        vcent = (v[:, v1, w1] + v[:, v1, w2] + v[:, v2, w1] + v[:, v2, w2]) / 4.0
        wcent = (w[:, v1, w1] + w[:, v1, w2] + w[:, v2, w1] + w[:, v2, w2]) / 4.0

        # FIXME: where is this range 351:7374 coming from?? The original
        # script considered a box of length 8192
        umean = np.mean(ucent[351:7374])
        vmean = np.mean(vcent[351:7374])
        wmean = np.mean(wcent[351:7374])

        ustd = np.std(ucent[351:7374])
        vstd = np.std(vcent[351:7374])
        wstd = np.std(wcent[351:7374])

        # normalize and scale to the BLADED int16 convention
        iu = np.zeros(shape)
        iv = np.zeros(shape)
        iw = np.zeros(shape)

        iu[:, :, :] = (u - umean)/ustd*1000.0
        iv[:, :, :] = (v - vmean)/vstd*1000.0
        iw[:, :, :] = (w - wmean)/wstd*1000.0

        # because MATLAB and Octave do a round when casting from float to
        # int, and Python does a floor, we have to round first
        np.around(iu, decimals=0, out=iu)
        np.around(iv, decimals=0, out=iv)
        np.around(iw, decimals=0, out=iw)

        return iu.astype(np.int16), iv.astype(np.int16), iw.astype(np.int16)

    def write_bladed(self, fpath, basename, shape):
        """
        Write a turbulence BLADED .wnd file: the fixed binary header
        followed by the interleaved int16 turbulence components.
        """
        # TODO: get these parameters from a HAWC2 input file
        seed = 6
        mean_ws = 11.4
        turb = 3
        R1 = -99
        R2 = 4

        du = 0.974121094
        dv = 4.6875
        dw = 4.6875

        longti = 14
        latti = 9.8
        vertti = 7

        iu, iv, iw = self.convert2bladed(fpath, basename, shape=shape)

        with open(fpath + basename + '.wnd', 'wb') as fid:
            fid.write(struct.pack('h', R1))  # R1
            fid.write(struct.pack('h', R2))  # R2
            fid.write(struct.pack('i', turb))  # Turb
            fid.write(struct.pack('f', 999))  # Lat
            fid.write(struct.pack('f', 999))  # rough
            fid.write(struct.pack('f', 999))  # refh
            fid.write(struct.pack('f', longti))  # LongTi
            fid.write(struct.pack('f', latti))  # LatTi
            fid.write(struct.pack('f', vertti))  # VertTi
            fid.write(struct.pack('f', dv))  # VertGpSpace
            fid.write(struct.pack('f', dw))  # LatGpSpace
            fid.write(struct.pack('f', du))  # LongGpSpace
            # FIX: integer division; struct.pack('i', float) raises on py3
            fid.write(struct.pack('i', shape[0]//2))  # HalfAlong
            fid.write(struct.pack('f', mean_ws))  # meanWS
            fid.write(struct.pack('f', 999.))  # VertLongComp
            fid.write(struct.pack('f', 999.))  # LatLongComp
            fid.write(struct.pack('f', 999.))  # LongLongComp
            fid.write(struct.pack('i', 999))  # Int
            fid.write(struct.pack('i', seed))  # Seed
            fid.write(struct.pack('i', shape[1]))  # VertGpNum
            fid.write(struct.pack('i', shape[2]))  # LatGpNum
            fid.write(struct.pack('f', 999))  # VertLatComp
            fid.write(struct.pack('f', 999))  # LatLatComp
            fid.write(struct.pack('f', 999))  # LongLatComp
            fid.write(struct.pack('f', 999))  # VertVertComp
            fid.write(struct.pack('f', 999))  # LatVertComp
            fid.write(struct.pack('f', 999))  # LongVertComp

            # re-arrange array for bladed format: lateral axis reversed,
            # components interleaved as the innermost dimension
            bladed = np.ndarray((shape[0], shape[2], shape[1], 3),
                                dtype=np.int16)
            bladed[:, :, :, 0] = iu[:, ::-1, :]
            bladed[:, :, :, 1] = iv[:, ::-1, :]
            bladed[:, :, :, 2] = iw[:, ::-1, :]
            bladed_swap_view = bladed.swapaxes(1, 2)
            bladed_swap_view.tofile(fid, format='%int16')
class Bladed(object):

    def __init__(self):
        """
        Some BLADED results I have seen are just weird text files. Convert
        them to a more convienent format.

        path/to/file
        channel 1 description
        col a name/unit col b name/unit
        a0 b0
        a1 b1
        ...
        path/to/file
        channel 2 description
        col a name/unit col b name/unit
        ...
        """
        pass

    def infer_format(self, lines):
        """
        Figure out how many channels and time steps are included

        Returns
        -------

        chans, iters : int, int
        """
        # the first line (the file path header) is repeated at the start
        # of every channel section
        count = 1
        for line in lines[1:]:
            if line == lines[0]:
                break
            count += 1
        # each channel section is 3 header lines plus the data rows
        iters = count - 3
        chans = len(lines) / (iters + 3)
        return int(chans), int(iters)

    def read(self, fname, chans=None, iters=None, enc='cp1252'):
        """
        Parameters
        ----------

        fname : str

        chans : int, default=None

        iters : int, default=None

        enc : str, default='cp1252'
            character encoding of the source file. Usually BLADED is used on
            windows so Western-European windows encoding is a safe bet.

        Returns
        -------

        df : pandas.DataFrame
            One column per channel (keys are the utf-8 encoded column
            headers), indexed by time; the index name is the file path
            found on the first line.
        """
        # FIX: codecs has no "opent" attribute, this was a typo for open
        with codecs.open(fname, 'r', enc) as f:
            lines = f.readlines()
        nrl = len(lines)
        if chans is None and iters is None:
            chans, iters = self.infer_format(lines)
        if iters is not None:
            chans = int(nrl / (iters + 3))
        if chans is not None:
            iters = int((nrl / chans) - 3)

        # file_head = [ [k[:-2],0] for k in lines[0:nrl:iters+3] ]
        # chan_head = [ [k[:-2],0] for k in lines[1:nrl:iters+3] ]
        # cols_head = [ k.split('\t')[:2] for k in lines[2:nrl:iters+3] ]

        data = {}
        for k in range(chans):
            # take the column header from the 3rd comment line; [:-2]
            # drops the \r\n line ending
            head = lines[2 + (3 + iters)*k][:-2].split('\t')[1].encode('utf-8')
            i0 = 3 + (3 + iters)*k
            i1 = i0 + iters
            data[head] = np.array([k[:-2].split('\t')[1] for k in lines[i0:i1:1]])
            data[head] = data[head].astype(np.float64)
        # the time stamps of the last section index the whole frame
        time = np.array([k[:-2].split('\t')[0] for k in lines[i0:i1:1]])
        df = pd.DataFrame(data, index=time.astype(np.float64))
        df.index.name = lines[0][:-2]
        return df
if __name__ == '__main__': if __name__ == '__main__':
pass pass
...@@ -19,11 +19,6 @@ Command line options ...@@ -19,11 +19,6 @@ Command line options
Author: Jenni Rinker, rink@dtu.dk Author: Jenni Rinker, rink@dtu.dk
""" """
from __future__ import print_function
from __future__ import division
from __future__ import unicode_literals
from __future__ import absolute_import
from argparse import ArgumentParser from argparse import ArgumentParser
import numpy as np import numpy as np
import os import os
......
'''
Created on 29/05/2013
@author: Mads M. Pedersen (mmpe@dtu.dk)
'''
import cython
import numpy as np
#cimport numpy as np
@cython.ccall
@cython.locals(alpha=cython.float, i=cython.int)
def cy_low_pass_filter(inp, delta_t, tau): #cpdef cy_low_pass_filter(np.ndarray[double,ndim=1] inp, double delta_t, double tau):
    """First order low pass filter with a constant time constant.

    Parameters
    ----------

    inp : ndarray(n)
        input signal
    delta_t : float
        sample time step
    tau : float
        filter time constant

    Returns
    -------

    output : ndarray(n) of float64
    """
    #cdef np.ndarray[double,ndim=1] output
    # FIX: the np.float alias (builtin float, i.e. float64) was removed in
    # numpy>=1.24; use np.float64 explicitly
    output = np.empty_like(inp, dtype=np.float64)
    output[0] = inp[0]
    alpha = delta_t / (tau + delta_t)
    for i in range(1, inp.shape[0]):
        output[i] = output[i - 1] + alpha * (inp[i] - output[i - 1])  # Same as output[i] = alpha*inp[i]+(1-alpha)*output[i-1]
    return output
def cy_dynamic_low_pass_filter(inp, delta_t, tau, method=1): #cpdef cy_dynamic_low_pass_filter(np.ndarray[double,ndim=1] inp, double delta_t, np.ndarray[double,ndim=1] tau, int method=1):
    """Low pass filter with a per-sample (time varying) time constant.

    Parameters
    ----------

    inp : ndarray(n)
        input signal
    delta_t : float
        sample time step
    tau : ndarray(n)
        filter time constant for each sample
    method : int, default=1
        1: explicit first-order update, 2: trapezoid-style update,
        3: exponential update

    Returns
    -------

    output : ndarray(n) of float64
    """
    #cdef np.ndarray[double,ndim=1] output, alpha
    #cdef int i
    # FIX: np.float was removed in numpy>=1.24; use np.float64
    output = np.empty_like(inp, dtype=np.float64)
    output[0] = inp[0]
    if method == 1:
        alpha = delta_t / (tau + delta_t)
        for i in range(1, inp.shape[0]):
            output[i] = output[i - 1] + alpha[i] * (inp[i] - output[i - 1])  # Same as output[i] = alpha*inp[i]+(1-alpha)*output[i-1]
    elif method == 2:
        for i in range(1, inp.shape[0]):
            output[i] = (delta_t * (inp[i] + inp[i - 1] - output[i - 1]) + 2 * tau[i] * output[i - 1]) / (delta_t + 2 * tau[i])
    elif method == 3:
        for i in range(1, inp.shape[0]):
            output[i] = output[i - 1] * np.exp(-delta_t / tau[i]) + inp[i] * (1 - np.exp(-delta_t / tau[i]))
    return output
def cy_dynamic_low_pass_filter_2d(inp, delta_t, tau, method=1): #cpdef cy_dynamic_low_pass_filter_2d(np.ndarray[double,ndim=2] inp, double delta_t, np.ndarray[double,ndim=1] tau, int method=1):
    """Low pass filter with a per-sample time constant, applied along the
    first axis of a 2D signal (each row is one time sample).

    Parameters
    ----------

    inp : ndarray(n, m)
        input signals, one column per channel
    delta_t : float
        sample time step
    tau : ndarray(n)
        filter time constant for each sample
    method : int, default=1
        1: explicit first-order update, 2: trapezoid-style update,
        3: exponential update

    Returns
    -------

    output : ndarray(n, m) of float64
    """
    #cdef np.ndarray[double,ndim=2] output, alpha
    #cdef int i
    # FIX: np.float was removed in numpy>=1.24; use np.float64
    output = np.empty_like(inp, dtype=np.float64)
    output[0] = inp[0]
    if method == 1:
        alpha = delta_t / (tau + delta_t)
        for i in range(1, inp.shape[0]):
            output[i] = output[i - 1] + alpha[i] * (inp[i] - output[i - 1])  # Same as output[i] = alpha*inp[i]+(1-alpha)*output[i-1]
    elif method == 2:
        for i in range(1, inp.shape[0]):
            output[i] = (delta_t * (inp[i] + inp[i - 1] - output[i - 1]) + 2 * tau[i] * output[i - 1]) / (delta_t + 2 * tau[i])
    elif method == 3:
        for i in range(1, inp.shape[0]):
            output[i] = output[i - 1] * np.exp(-delta_t / tau[i]) + inp[i] * (1 - np.exp(-delta_t / tau[i]))
    return output
def cy_dynamic_low_pass_filter_test(inp):  # cpdef cy_dynamic_low_pass_filter_test(np.ndarray[double,ndim=2] inp):
    """Identity pass-through used to test the filter loop scaffolding.

    Copies `inp` sample by sample (no filtering), returning a new float64
    array of the same shape.
    """
    # cdef np.ndarray[double,ndim=2] output, alpha
    # cdef int i
    # np.float was removed in NumPy 1.24; np.float64 is the equivalent dtype.
    output = np.empty_like(inp, dtype=np.float64)
    output[0] = inp[0]
    for i in range(1, inp.shape[0]):
        output[i] = inp[i]
    return output
@cython.ccall
@cython.locals(alpha=cython.float, i=cython.int)
def cy_high_pass_filter(inp, delta_t, tau):  # cpdef cy_high_pass_filter(np.ndarray[double,ndim=1] inp, double delta_t, double tau):
    """First-order high pass filter with a fixed time constant.

    Parameters
    ----------
    inp : array_like, 1d
        Input signal, sampled with fixed time step `delta_t`.
    delta_t : float
        Sample time step [s].
    tau : float
        Filter time constant [s].

    Returns
    -------
    ndarray
        Filtered signal; first sample is passed through.
    """
    # cdef np.ndarray[double,ndim=1] output
    # np.float was removed in NumPy 1.24; np.float64 is the equivalent dtype.
    output = np.empty_like(inp, dtype=np.float64)
    output[0] = inp[0]
    alpha = tau / (tau + delta_t)
    for i in range(1, inp.shape[0]):
        output[i] = alpha * (output[i - 1] + inp[i] - inp[i - 1])
    return output
import cython
import numpy as np
cimport numpy as np
'''
Created on 29/05/2013
@author: Mads M. Pedersen (mmpe@dtu.dk)
'''
import cython
import numpy as np
cimport numpy as np
@cython.ccall
@cython.locals(alpha=cython.float, i=cython.int)
cpdef cy_low_pass_filter(np.ndarray[double,ndim=1] inp, double delta_t, double tau):
    # First-order low pass filter: y[i] = y[i-1] + alpha*(x[i] - y[i-1]),
    # alpha = dt/(tau+dt). First sample is passed through unfiltered.
    cdef np.ndarray[double,ndim=1] output
    # np.float was removed in NumPy 1.24; np.float64 is the equivalent dtype.
    output = np.empty_like(inp, dtype=np.float64)
    output[0] = inp[0]
    alpha = delta_t / (tau + delta_t)
    for i in range(1, inp.shape[0]):
        output[i] = output[i - 1] + alpha * (inp[i] - output[i - 1])  # Same as output[i] = alpha*inp[i]+(1-alpha)*output[i-1]
    return output
cpdef cy_dynamic_low_pass_filter(np.ndarray[double,ndim=1] inp, double delta_t, np.ndarray[double,ndim=1] tau, int method):
    # Low pass filter with a per-sample time constant tau[i].
    # method 1: exponential smoothing, alpha = dt/(tau+dt)
    # method 2: trapezoidal (bilinear) discretization of the first-order lag
    # method 3: exact exponential update using exp(-dt/tau)
    cdef np.ndarray[double,ndim=1] output, alpha
    cdef int i
    # np.float was removed in NumPy 1.24; np.float64 is the equivalent dtype.
    output = np.empty_like(inp, dtype=np.float64)
    output[0] = inp[0]
    if method == 1:
        alpha = delta_t / (tau + delta_t)
        for i in range(1, inp.shape[0]):
            output[i] = output[i - 1] + alpha[i] * (inp[i] - output[i - 1])  # Same as output[i] = alpha*inp[i]+(1-alpha)*output[i-1]
    elif method == 2:
        for i in range(1, inp.shape[0]):
            output[i] = (delta_t * (inp[i] + inp[i - 1] - output[i - 1]) + 2 * tau[i] * output[i - 1]) / (delta_t + 2 * tau[i])
    elif method == 3:
        for i in range(1, inp.shape[0]):
            output[i] = output[i - 1] * np.exp(-delta_t / tau[i]) + inp[i] * (1 - np.exp(-delta_t / tau[i]))
    return output
@cython.ccall
@cython.locals(alpha=cython.float, i=cython.int)
cpdef cy_high_pass_filter(np.ndarray[double,ndim=1] inp, double delta_t, double tau):
    # First-order high pass filter: y[i] = alpha*(y[i-1] + x[i] - x[i-1]),
    # alpha = tau/(tau+dt). First sample is passed through unfiltered.
    cdef np.ndarray[double,ndim=1] output
    # np.float was removed in NumPy 1.24; np.float64 is the equivalent dtype.
    output = np.empty_like(inp, dtype=np.float64)
    output[0] = inp[0]
    alpha = tau / (tau + delta_t)
    for i in range(1, inp.shape[0]):
        output[i] = alpha * (output[i - 1] + inp[i] - inp[i - 1])
    return output
...@@ -4,19 +4,87 @@ Created on 10/01/2015 ...@@ -4,19 +4,87 @@ Created on 10/01/2015
@author: mmpe @author: mmpe
''' '''
import numpy as np import numpy as np
from numba.core.decorators import njit
def low_pass(input, delta_t, tau, method=1):
    """Low pass filter `input` with a first-order filter.

    Parameters
    ----------
    input : array_like
        Signal to filter; 1d, or 2d (time x channels) when `tau` is an array.
    delta_t : float
        Sample time step [s].
    tau : float or array_like
        Filter time constant [s]. A scalar selects the fixed-tau filter;
        an array (one value per sample) selects the dynamic filter.
    method : {1, 2, 3}
        Discretization used by the dynamic filter (ignored for scalar tau).

    Returns
    -------
    ndarray
        Filtered signal.
    """
    # NOTE(review): this line appeared merged with the superseded
    # cy_filters implementation in the diff; reconstructed target revision.
    if isinstance(tau, (int, float)):
        return low_pass_filter(input.astype(np.float64), delta_t, tau)
    else:
        if len(input.shape) == 2:
            return dynamic_low_pass_filter_2d(input.astype(np.float64), delta_t, tau, method)
        else:
            return dynamic_low_pass_filter(input.astype(np.float64), delta_t, tau, method)
def high_pass(input, delta_t, tau):
    """High pass filter `input` with a first-order filter of time constant `tau`."""
    return high_pass_filter(input.astype(np.float64), delta_t, tau)
@njit(cache=True)
def low_pass_filter(inp, delta_t, tau):
    """First-order low pass filter with a fixed time constant.

    Parameters
    ----------
    inp : ndarray, 1d
        Input signal (callers cast to float64 before calling).
    delta_t : float
        Sample time step [s].
    tau : float
        Filter time constant [s].

    Returns
    -------
    ndarray
        Filtered signal, same dtype as `inp`; first sample passed through.
    """
    # Preserve the input dtype: the previous float32 buffer silently
    # truncated the float64 signals the public wrappers pass in.
    output = np.empty_like(inp)
    output[0] = inp[0]
    alpha = delta_t / (tau + delta_t)
    for i in range(1, inp.shape[0]):
        # Same as output[i] = alpha*inp[i]+(1-alpha)*output[i-1]
        output[i] = output[i - 1] + alpha * (inp[i] - output[i - 1])
    return output
@njit(cache=True)
def dynamic_low_pass_filter(inp, delta_t, tau, method=1):
    """Low pass filter a 1d signal with a per-sample time constant.

    Parameters
    ----------
    inp : ndarray, 1d
        Input signal.
    delta_t : float
        Sample time step [s].
    tau : ndarray, 1d
        Filter time constant per sample [s], same length as `inp`.
    method : {1, 2, 3}
        1: exponential smoothing, 2: trapezoidal (bilinear) discretization,
        3: exact exponential update.

    Returns
    -------
    ndarray
        Filtered signal, same dtype as `inp`; first sample passed through.
    """
    # Preserve the input dtype; a float32 buffer would truncate float64 input.
    output = np.empty_like(inp)
    output[0] = inp[0]
    if method == 1:
        alpha = delta_t / (tau + delta_t)
        for i in range(1, inp.shape[0]):
            # Same as output[i] = alpha*inp[i]+(1-alpha)*output[i-1]
            output[i] = output[i - 1] + alpha[i] * (inp[i] - output[i - 1])
    elif method == 2:
        for i in range(1, inp.shape[0]):
            output[i] = (delta_t * (inp[i] + inp[i - 1] - output[i - 1]) +
                         2 * tau[i] * output[i - 1]) / (delta_t + 2 * tau[i])
    elif method == 3:
        for i in range(1, inp.shape[0]):
            output[i] = output[i - 1] * np.exp(-delta_t / tau[i]) + inp[i] * (1 - np.exp(-delta_t / tau[i]))
    return output
@njit(cache=True)
def dynamic_low_pass_filter_2d(inp, delta_t, tau, method=1):
    """Low pass filter a 2d signal (time x channels) with per-sample time constant.

    Each row of `inp` is one time sample; all channels in a row share the
    same time constant `tau[i]`.

    Parameters
    ----------
    inp : ndarray, 2d
        Input signals, shape (n_samples, n_channels).
    delta_t : float
        Sample time step [s].
    tau : ndarray, 1d
        Filter time constant per sample [s], length n_samples.
    method : {1, 2, 3}
        See dynamic_low_pass_filter.

    Returns
    -------
    ndarray
        Filtered signals, same shape and dtype as `inp`.
    """
    # Preserve the input dtype; a float32 buffer would truncate float64 input.
    output = np.empty_like(inp)
    output[0] = inp[0]
    if method == 1:
        alpha = delta_t / (tau + delta_t)
        for i in range(1, inp.shape[0]):
            # Same as output[i] = alpha*inp[i]+(1-alpha)*output[i-1]
            output[i] = output[i - 1] + alpha[i] * (inp[i] - output[i - 1])
    elif method == 2:
        for i in range(1, inp.shape[0]):
            output[i] = (delta_t * (inp[i] + inp[i - 1] - output[i - 1]) +
                         2 * tau[i] * output[i - 1]) / (delta_t + 2 * tau[i])
    elif method == 3:
        for i in range(1, inp.shape[0]):
            output[i] = output[i - 1] * np.exp(-delta_t / tau[i]) + inp[i] * (1 - np.exp(-delta_t / tau[i]))
    return output
@njit(cache=True)
def high_pass_filter(inp, delta_t, tau):
    """First-order high pass filter with a fixed time constant.

    Parameters
    ----------
    inp : ndarray, 1d
        Input signal.
    delta_t : float
        Sample time step [s].
    tau : float
        Filter time constant [s].

    Returns
    -------
    ndarray
        Filtered signal, same dtype as `inp`; first sample passed through.
    """
    # Preserve the input dtype; a float32 buffer would truncate float64 input.
    output = np.empty_like(inp)
    output[0] = inp[0]
    alpha = tau / (tau + delta_t)
    for i in range(1, inp.shape[0]):
        output[i] = alpha * (output[i - 1] + inp[i] - inp[i - 1])
    # NOTE(review): the last line appeared merged with the superseded
    # cy_filters return in the diff; only `return output` belongs here.
    return output
import numpy as np import numpy as np
from scipy.interpolate.interpolate import interp1d from scipy.interpolate import interp1d
def bin_fit(x, y, bins=10, kind='linear', bin_func=np.nanmean, bin_min_count=3, lower_upper='discard'):
def bin_fit(x,y, bins=10, kind='linear', bin_func=np.nanmean, bin_min_count=3, lower_upper='discard'):
"""Fit observations based on bin statistics """Fit observations based on bin statistics
Parameters Parameters
--------- ---------
x : array_like x : array_like
...@@ -29,29 +28,29 @@ def bin_fit(x,y, bins=10, kind='linear', bin_func=np.nanmean, bin_min_count=3, l ...@@ -29,29 +28,29 @@ def bin_fit(x,y, bins=10, kind='linear', bin_func=np.nanmean, bin_min_count=3, l
- "discard": - "discard":
- "extrapolate": - "extrapolate":
- int: Set f(max(x)) to mean of first/last int observations - int: Set f(max(x)) to mean of first/last int observations
Returns Returns
------- -------
bin_x, fit_function bin_x, fit_function
""" """
x, y = np.array(x[:]), np.array(y[:]) x, y = np.array(x[:]), np.array(y[:])
if isinstance(bins, int): if isinstance(bins, int):
bins = np.linspace(np.nanmin(x), np.nanmax(x) + 1e-10, bins + 1) bins = np.linspace(np.nanmin(x), np.nanmax(x) + 1e-10, bins + 1)
elif isinstance(bins, tuple) and len(bins)==2 and isinstance(bins[0], int) and isinstance(bins[1], int): elif isinstance(bins, tuple) and len(bins) == 2 and isinstance(bins[0], int) and isinstance(bins[1], int):
xbins, ybins = bins xbins, ybins = bins
if xbins>0: if xbins > 0:
xbinsx = np.linspace(np.nanmin(x), np.nanmax(x) + 1e-10, xbins + 1) xbinsx = np.linspace(np.nanmin(x), np.nanmax(x) + 1e-10, xbins + 1)
else: else:
xbinsx = [] xbinsx = []
if ybins>0: if ybins > 0:
x1, f1 = bin_fit(y,x, kind=1, bins=ybins) x1, f1 = bin_fit(y, x, kind=1, bins=ybins)
xbinsy = f1(x1) xbinsy = f1(x1)
else: else:
xbinsy = [] xbinsy = []
#x2, f2 = bin_fit(x,y, kind=1, bins=xbins) #x2, f2 = bin_fit(x,y, kind=1, bins=xbins)
bins = sorted(np.r_[xbinsx, xbinsy ]) bins = sorted(np.r_[xbinsx, xbinsy])
digitized = np.digitize(x, bins) digitized = np.digitize(x, bins)
digitized[np.isnan(x) | np.isnan(y)] = -1 digitized[np.isnan(x) | np.isnan(y)] = -1
...@@ -66,50 +65,50 @@ def bin_fit(x,y, bins=10, kind='linear', bin_func=np.nanmean, bin_min_count=3, l ...@@ -66,50 +65,50 @@ def bin_fit(x,y, bins=10, kind='linear', bin_func=np.nanmean, bin_min_count=3, l
#bin_x_fit, bin_y = [b[bin_count >= bin_min_count] for b in [bin_x, bin_y]] #bin_x_fit, bin_y = [b[bin_count >= bin_min_count] for b in [bin_x, bin_y]]
bin_x_fit = bin_x bin_x_fit = bin_x
m = np.isnan(bin_x_fit) m = np.isnan(bin_x_fit)
bin_x_fit[m] = ((bins[:-1]+bins[1:])/2)[m] bin_x_fit[m] = ((bins[:-1] + bins[1:]) / 2)[m]
bin_y_fit = bin_y.copy() bin_y_fit = bin_y.copy()
bin_y_fit[bin_count<bin_min_count]= np.nan bin_y_fit[bin_count < bin_min_count] = np.nan
if isinstance(lower_upper, (str, int)): if isinstance(lower_upper, (str, int)):
lower = upper = lower_upper lower = upper = lower_upper
else: else:
lower, upper = lower_upper lower, upper = lower_upper
#Add value to min(x) # Add value to min(x)
if bin_x_fit[0] > np.nanmin(x) or np.isnan(bin_y_fit[0]): if bin_x_fit[0] > np.nanmin(x) or np.isnan(bin_y_fit[0]):
if lower =='extrapolate': if lower == 'extrapolate':
bin_y_fit = np.r_[bin_y_fit[0] - (bin_x_fit[0] - np.nanmin(x)) * (bin_y_fit[1] - bin_y_fit[0]) / (bin_x_fit[1] - bin_x_fit[0]), bin_y_fit] bin_y_fit = np.r_[bin_y_fit[0] - (bin_x_fit[0] - np.nanmin(x)) *
(bin_y_fit[1] - bin_y_fit[0]) / (bin_x_fit[1] - bin_x_fit[0]), bin_y_fit]
bin_x_fit = np.r_[np.nanmin(x), bin_x_fit] bin_x_fit = np.r_[np.nanmin(x), bin_x_fit]
elif lower=="discard": elif lower == "discard":
pass pass
elif isinstance(lower, int): elif isinstance(lower, int):
bin_y_fit = np.r_[np.mean(y[~np.isnan(x)][np.argsort(x[~np.isnan(x)])[:lower]]), bin_y_fit] bin_y_fit = np.r_[np.mean(y[~np.isnan(x)][np.argsort(x[~np.isnan(x)])[:lower]]), bin_y_fit]
bin_x_fit = np.r_[np.nanmin(x), bin_x_fit] bin_x_fit = np.r_[np.nanmin(x), bin_x_fit]
else: else:
raise NotImplementedError("Argument for handling lower observations, %s, not implemented"%lower) raise NotImplementedError("Argument for handling lower observations, %s, not implemented" % lower)
#add value to max(x) # add value to max(x)
if bin_x_fit[-1] < np.nanmax(x) or np.isnan(bin_y_fit[-1]): if bin_x_fit[-1] < np.nanmax(x) or np.isnan(bin_y_fit[-1]):
if upper == 'extrapolate': if upper == 'extrapolate':
bin_y_fit = np.r_[bin_y_fit, bin_y_fit[-1] + (np.nanmax(x) - bin_x_fit[-1]) * (bin_y_fit[-1] - bin_y_fit[-2]) / (bin_x_fit[-1] - bin_x_fit[-2]) ] bin_y_fit = np.r_[bin_y_fit, bin_y_fit[-1] + (np.nanmax(x) - bin_x_fit[-1])
* (bin_y_fit[-1] - bin_y_fit[-2]) / (bin_x_fit[-1] - bin_x_fit[-2])]
bin_x_fit = np.r_[bin_x_fit, np.nanmax(x)] bin_x_fit = np.r_[bin_x_fit, np.nanmax(x)]
elif upper=="discard": elif upper == "discard":
pass pass
elif isinstance(upper, int): elif isinstance(upper, int):
bin_y_fit = np.r_[bin_y_fit, np.mean(y[~np.isnan(x)][np.argsort(x[~np.isnan(x)])[-upper:]])] bin_y_fit = np.r_[bin_y_fit, np.mean(y[~np.isnan(x)][np.argsort(x[~np.isnan(x)])[-upper:]])]
bin_x_fit = np.r_[bin_x_fit, np.nanmax(x)] bin_x_fit = np.r_[bin_x_fit, np.nanmax(x)]
else: else:
raise NotImplementedError("Argument for handling upper observations, %s, not implemented"%upper) raise NotImplementedError("Argument for handling upper observations, %s, not implemented" % upper)
return bin_x_fit, _interpolate_fit(bin_x_fit, bin_y_fit, kind) return bin_x_fit, _interpolate_fit(bin_x_fit, bin_y_fit, kind)
def perpendicular_bin_fit(x, y, bins = 30, fit_func=None, bin_min_count=3, plt=None):
def perpendicular_bin_fit(x, y, bins=30, fit_func=None, bin_min_count=3, plt=None):
"""Fit a curve to the values, (x,y) using bins that are perpendicular to an initial fit """Fit a curve to the values, (x,y) using bins that are perpendicular to an initial fit
Parameters Parameters
--------- ---------
x : array_like x : array_like
...@@ -124,77 +123,76 @@ def perpendicular_bin_fit(x, y, bins = 30, fit_func=None, bin_min_count=3, plt=N ...@@ -124,77 +123,76 @@ def perpendicular_bin_fit(x, y, bins = 30, fit_func=None, bin_min_count=3, plt=N
bin_min_count : int, optional bin_min_count : int, optional
Minimum number of observations in bins to include Minimum number of observations in bins to include
Default is 3 Default is 3
plt : pyplot or None plt : pyplot or None
If pyplot the fitting process is plotted on plt If pyplot the fitting process is plotted on plt
Returns Returns
------- -------
fit_x, fit_y fit_x, fit_y
""" """
if fit_func is None: if fit_func is None:
fit_func = lambda x,y : bin_fit(x, y, bins, bin_func=np.nanmean) def fit_func(x, y): return bin_fit(x, y, bins, bin_func=np.nanmean)
x,y = [v[~np.isnan(x)&~np.isnan(y)] for v in [x,y]] x, y = [v[~np.isnan(x) & ~np.isnan(y)] for v in [x, y]]
bfx,f = fit_func(x, y) bfx, f = fit_func(x, y)
bfy = f(bfx) bfy = f(bfx)
bfx, bfy = [v[~np.isnan(bfx) & ~np.isnan(bfy)] for v in [bfx, bfy]]
bfx, bfy = [v[~np.isnan(bfx)&~np.isnan(bfy)] for v in [bfx,bfy]]
if plt: if plt:
x_range, y_range = [v.max()-v.min() for v in [x,y]] x_range, y_range = [v.max() - v.min() for v in [x, y]]
plt.ylim([y.min()-y_range*.1, y.max()+y_range*.1]) plt.ylim([y.min() - y_range * .1, y.max() + y_range * .1])
plt.xlim([x.min()-x_range*.1, x.max()+x_range*.1]) plt.xlim([x.min() - x_range * .1, x.max() + x_range * .1])
# divide curve into N segments of same normalized curve length # divide curve into N segments of same normalized curve length
xg, xo = np.nanmax(bfx)-np.nanmin(bfx), np.nanmin(bfx) xg, xo = np.nanmax(bfx) - np.nanmin(bfx), np.nanmin(bfx)
yg, yo = np.nanmax(bfy)-np.nanmin(bfy), np.nanmin(bfy) yg, yo = np.nanmax(bfy) - np.nanmin(bfy), np.nanmin(bfy)
nbfx = (bfx-xo)/xg nbfx = (bfx - xo) / xg
nbfy = (bfy-yo)/yg nbfy = (bfy - yo) / yg
l = np.cumsum(np.sqrt(np.diff(nbfx)**2+np.diff(nbfy)**2)) l = np.cumsum(np.sqrt(np.diff(nbfx)**2 + np.diff(nbfy)**2))
nx, ny = [np.interp(np.linspace(l[0], l[-1], bins+1), l, (xy[1:]+xy[:-1])/2) for xy in [nbfx,nbfy]] nx, ny = [np.interp(np.linspace(l[0], l[-1], bins + 1), l, (xy[1:] + xy[:-1]) / 2) for xy in [nbfx, nbfy]]
last = (-1, 0)
last = (-1,0)
pc = [] pc = []
used = np.zeros_like(x).astype(np.bool) used = np.zeros_like(x).astype(bool)
for i in range(0,len(nx)): for i in range(0, len(nx)):
i1,i2 = max(0,i-1), min(len(nx)-1,i+1) i1, i2 = max(0, i - 1), min(len(nx) - 1, i + 1)
a =-(nx[i2]-nx[i1])/ (ny[i2]-ny[i1]) a = -(nx[i2] - nx[i1]) / (ny[i2] - ny[i1])
b = (ny[i]-(a*nx[i]))*yg+yo b = (ny[i] - (a * nx[i])) * yg + yo
a *=yg/xg a *= yg / xg
x_ = [np.nanmin(x), np.nanmax(x)] x_ = [np.nanmin(x), np.nanmax(x)]
m1 = np.sign(last[0])*y < np.sign(last[0])*((x-xo)*last[0]+last[1]) m1 = np.sign(last[0]) * y < np.sign(last[0]) * ((x - xo) * last[0] + last[1])
m2 = np.sign(a)*y>np.sign(a)*(a*(x-xo)+b) m2 = np.sign(a) * y > np.sign(a) * (a * (x - xo) + b)
m = m1&m2&~used m = m1 & m2 & ~used
if plt: if plt:
plt.plot(x_, ((a)*(x_-xo))+b) plt.plot(x_, ((a) * (x_ - xo)) + b)
plt.plot(x[m], y[m],'.') plt.plot(x[m], y[m], '.')
if np.sum(m)>=bin_min_count: if np.sum(m) >= bin_min_count:
pc.append((np.median(x[m]), np.median(y[m]))) pc.append((np.median(x[m]), np.median(y[m])))
used = used|m used = used | m
last = (a,b) last = (a, b)
#bfx,bfy = zip(*pc) #bfx,bfy = zip(*pc)
if plt: if plt:
pbfx, pbfy = np.array(pc).T pbfx, pbfy = np.array(pc).T
plt.plot(bfx,bfy, 'orange', label='initial_fit') plt.plot(bfx, bfy, 'orange', label='initial_fit')
plt.plot(pbfx, pbfy, 'gray', label="perpendicular fit") plt.plot(pbfx, pbfy, 'gray', label="perpendicular fit")
plt.legend() plt.legend()
#PlotData(None, bfx,bfy) #PlotData(None, bfx,bfy)
bin_x_fit, bin_y_fit = np.array(pc).T bin_x_fit, bin_y_fit = np.array(pc).T
return bin_x_fit, _interpolate_fit(bin_x_fit, bin_y_fit, kind="linear") return bin_x_fit, _interpolate_fit(bin_x_fit, bin_y_fit, kind="linear")
# Create mean function
def _interpolate_fit(bin_x_fit, bin_y_fit, kind='linear'):
    """Return a function f(x) interpolating (bin_x_fit, bin_y_fit).

    Values of x outside [bin_x_fit[0], bin_x_fit[-1]] map to NaN; bins where
    either coordinate is NaN are excluded from the interpolation.
    """
    def fit(x):
        # np.float was removed in NumPy 1.24; builtin float is equivalent here.
        x = np.atleast_1d(x)[:].copy().astype(float)
        x[x < bin_x_fit[0]] = np.nan
        x[x > bin_x_fit[-1]] = np.nan
        m = ~(np.isnan(bin_x_fit) | np.isnan(bin_y_fit))
        return interp1d(bin_x_fit[m], bin_y_fit[m], kind)(x[:])
    return fit
...@@ -13,13 +13,13 @@ def fourier_fit(y, max_nfft, x=None): ...@@ -13,13 +13,13 @@ def fourier_fit(y, max_nfft, x=None):
return d, lambda deg : np.interp(deg%360, d, F2x(x2F(y, max_nfft, x))) return d, lambda deg : np.interp(deg%360, d, F2x(x2F(y, max_nfft, x)))
def fourier_fit_old(y, nfft):
    """Reconstruct y from its first nfft+1 one-sided Fourier coefficients.

    Kept for backward compatibility; see fourier_fit for the current version.
    """
    # np.complex was removed in NumPy 1.24; builtin complex is equivalent.
    F = np.zeros(len(y), dtype=complex)
    F[:nfft + 1] = x2F(y, nfft)[:nfft + 1]
    return np.fft.ifft(F) * len(F)
def F2x(F_coefficients): def F2x(F_coefficients):
"""Compute signal from Fourier coefficients""" """Compute signal from Fourier coefficients"""
F = np.zeros(360, dtype=np.complex) F = np.zeros(360, dtype=complex)
nfft = len(F_coefficients) // 2 nfft = len(F_coefficients) // 2
F[:nfft + 1] = np.conj(F_coefficients[:nfft + 1]) F[:nfft + 1] = np.conj(F_coefficients[:nfft + 1])
F[1:nfft + 1] += (F_coefficients[-nfft:][::-1]) F[1:nfft + 1] += (F_coefficients[-nfft:][::-1])
...@@ -53,7 +53,7 @@ def x2F(y, max_nfft, x=None): ...@@ -53,7 +53,7 @@ def x2F(y, max_nfft, x=None):
b[r] = 2 * np.nansum(y * np.sin(i * theta)) b[r] = 2 * np.nansum(y * np.sin(i * theta))
AB = np.linalg.solve(a, b) AB = np.linalg.solve(a, b)
F = np.zeros(n, dtype=np.complex) F = np.zeros(n, dtype=complex)
F = np.r_[AB[0], (AB[1:nfft + 1] + 1j * AB[nfft + 1:]), np.zeros(nfft) ] F = np.r_[AB[0], (AB[1:nfft + 1] + 1j * AB[nfft + 1:]), np.zeros(nfft) ]
return F return F
...@@ -73,5 +73,5 @@ def rF2x(rF): ...@@ -73,5 +73,5 @@ def rF2x(rF):
"""Convert single sided Fourier components, that satisfies x(t) = sum(X(cos(iw)+sin(iw)), i=0..N) to non-complex signal""" """Convert single sided Fourier components, that satisfies x(t) = sum(X(cos(iw)+sin(iw)), i=0..N) to non-complex signal"""
rF = np.conj(rF) rF = np.conj(rF)
rF[1:] /= 2 rF[1:] /= 2
rF = np.r_[rF, np.zeros(181 - len(rF), dtype=np.complex)] rF = np.r_[rF, np.zeros(181 - len(rF), dtype=complex)]
return np.fft.irfft(rF) * 360 return np.fft.irfft(rF) * 360