#!/bin/bash
# cd into the working directory in case we are not already in it
cd "$(dirname "$0")"
echo "----------------------------------------------------------------------------------------------"
echo "icon-globe: Starting processing of icon model data - `date`"
echo "----------------------------------------------------------------------------------------------"
# Folder to be used to download and process data
export MODEL_DATA_FOLDER="/home/ekman/ssd/guido/icon-globe/"
export HOME_FOLDER=$(pwd)
export N_CONCUR_PROCESSES=12
export NCFTP_BOOKMARK="mid"
export REMAP=true
DATA_DOWNLOAD=true
DATA_PLOTTING=true
DATA_UPLOAD=true
##### LOAD functions to download model data
. ./functions_download_dwd.sh
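# GNU parallel runs its jobs under $SHELL, so make sure it points at bash and the sourced download functions stay usable (assumes they are exported with export -f)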
export SHELL=$(type -p bash)
# We need to open many files at the same time
ulimit -Sn 8192
#
###########################################
# Retrieve run ##########################
latest_run=$(python get_last_run.py)
if [ -f "${MODEL_DATA_FOLDER}last_processed_run.txt" ]; then
latest_processed_run=$(cat "${MODEL_DATA_FOLDER}last_processed_run.txt")
if [ "$latest_run" -gt "$latest_processed_run" ]; then
echo "New run ${latest_run} found! Last processed run was ${latest_processed_run}."
else
echo "Run ${latest_processed_run} already processed, exiting"
exit 0
fi
fi
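# The run identifier has the form YYYYMMDDHH: split it into year, month, day and run hour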
export year=${latest_run:0:4}
export month=${latest_run:4:2}
export day=${latest_run:6:2}
export run=${latest_run:8:2}
#############################################
mkdir -p ${MODEL_DATA_FOLDER}world
mkdir -p ${MODEL_DATA_FOLDER}remap
mkdir -p ${MODEL_DATA_FOLDER}us
mkdir -p ${MODEL_DATA_FOLDER}euratl
mkdir -p ${MODEL_DATA_FOLDER}it
mkdir -p ${MODEL_DATA_FOLDER}de
mkdir -p ${MODEL_DATA_FOLDER}nh_polar
# Move to the data folder to do processing
cd ${MODEL_DATA_FOLDER} || { echo 'Cannot change to DATA folder' ; exit 1; }
# SECTION 1 - DATA DOWNLOAD ############################################################
if [ "$DATA_DOWNLOAD" = true ]; then
echo "----------------------------------------------------------------------------------------------"
echo "icon-globe: Starting downloading of data - `date`"
echo "----------------------------------------------------------------------------------------------"
# Remove older files
rm ${MODEL_DATA_FOLDER}*.nc
rm ${MODEL_DATA_FOLDER}remap/*.nc
# Invariant
download_invariant_icon_globe
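# Forecast lead times to fetch: 000 to 180 h every 3 h, zero-padded and '|'-separated (exported for the download functions)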
export hours_download=$(seq -s "|" -f "%03g" 0 3 180)
#2-D variables
variables=("PMSL" "CLCT" "TOT_PREC" "VMAX_10M" "RAIN_CON"\
"RAIN_GSP" "SNOW_CON" "SNOW_GSP" "T_2M" "TD_2M" "WW"\
"U_10M" "V_10M" "H_SNOW")
parallel -j 8 --delay 1 download_merge_2d_variable_icon_globe ::: "${variables[@]}"
#3-D variables on pressure levels
variables=("T" "FI" "RELHUM" "U" "V")
parallel -j 8 --delay 1 download_merge_3d_variable_icon_globe ::: "${variables[@]}"
fi
# SECTION 2 - DATA PLOTTING ############################################################
if [ "$DATA_PLOTTING" = true ]; then
echo "----------------------------------------------------------------------------------------------"
echo "icon-globe: Starting plotting of data - `date`"
echo "----------------------------------------------------------------------------------------------"
python --version
cp ${HOME_FOLDER}/plotting/*.py ${MODEL_DATA_FOLDER}
export QT_QPA_PLATFORM=offscreen # Needed to avoid errors when using Python without display
python plot_meteogram_remap.py Hamburg Storrs Milan Pisa Toulouse Rome Naples Utrecht Sassari Palermo Cagliari
scripts=("plot_jetstream_remap.py" "plot_rain_acc_remap.py" "plot_clouds_remap.py" "plot_geop_500_remap.py"\
"plot_mslp_wind_remap.py" "plot_thetae_remap.py" "plot_geop_50_remap.py" \
"plot_winter_remap.py")
projections=("euratl" "nh" "nh_polar" "us" "world" "it" "de")
parallel -j 3 --delay 5 python ::: "${scripts[@]}" ::: "${projections[@]}"
rm ${MODEL_DATA_FOLDER}/*.py
fi
############################################################
# If we produced at least some netCDF files and images we assume the run was successful and
# record the processed run in a text file. Unfortunately there is no easy way to check
# whether the processing really succeeded, since many non-fatal errors can occur along the
# way without undermining the whole run.
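# Each matching file prints a single 'x', so wc -c yields a robust file count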
N_NETCDF_FILES=$(find . -type f -name '*.nc' -printf x | wc -c)
N_IMAGES=$(find . -type f -name '*.png' -printf x | wc -c)
if [ "$N_NETCDF_FILES" -ge 2 ] && [ "$N_IMAGES" -ge 10 ]; then
echo ${latest_run} > last_processed_run.txt
fi
# SECTION 3 - IMAGES UPLOAD ############################################################
# Use ncftpbookmarks to add a new FTP server with credentials
if [ "$DATA_UPLOAD" = true ]; then
echo "----------------------------------------------------------------------------------------------"
echo "icon-globe: Starting FTP uploading - `date`"
echo "----------------------------------------------------------------------------------------------"
# First upload meteograms
ncftpput -R -v -DD -m ${NCFTP_BOOKMARK} icon_globe/meteograms meteogram_*
#
# Then upload the other pictures
#
images_output=("thetae_850" "gph_500" "winds10m" "precip_clouds" "winds_jet" "precip_acc" "pv_250" "gpt_50" "winter")
# local sub-folder holding each projection's images (prepended to the image filename pattern)
projections_output=("euratl/" "" "nh_polar/" "world/" "us/" "it/" "de/")
# remote folder on the server, index-aligned with projections_output
projections_output_folder=("icon_euratl" "icon_globe" "icon_nh_polar" "icon_world" "icon_us" "icon_g_it" "icon_g_de")
# Create a list of all the images to upload
upload_elements=()
for i in "${!projections_output[@]}"; do
for j in "${images_output[@]}"; do
upload_elements+=("${projections_output_folder[$i]}/${j} ./${projections_output[$i]}${j}_*")
done
done
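# Each entry pairs a remote folder with a local glob; the element is expanded unquoted when passed to ncftpput below, so the glob resolves at upload time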
# Finally upload the images
# for k in "${upload_elements[@]}"; do
# ncftpput -R -v -DD -m ${NCFTP_BOOKMARK} ${k}
# done
num_procs=6
num_iters=${#upload_elements[@]}
num_jobs="\j" # The prompt escape for number of jobs currently running
for ((i=0; i<num_iters; i++)); do
while (( ${num_jobs@P} >= num_procs )); do
wait -n
done
ncftpput -R -v -DD -m ${NCFTP_BOOKMARK} ${upload_elements[$i]} &
done
fi
# SECTION 4 - CLEANING ############################################################
echo "----------------------------------------------------------------------------------------------"
echo "icon-globe: Finished cleaning up - `date`"
echo "----------------------------------------------------------------------------------------------"
############################################################
cd -