Fix subscript bug. g95 ignored it, but pgi insisted it be fixed.
[WPS.git] / util / regtest_wps.csh
blob 3bbea9b3c9fb692038a1f390b73db0d6949e2b0f
1 #!/bin/csh
# WPS regression test driver (csh). Builds WRFV2 and WPS, then for each
# case under WPS/test_suite runs geogrid/ungrib/metgrid, feeds the result
# through real.exe and wrf.exe, and renders comparison plots with RIP.
# The "#BSUB" lines below are LSF batch-scheduler directives, not shell
# comments in the functional sense; they are read by bsub at submit time.
3 #BSUB -P 64000400 # proj account to charge
4 #BSUB -R "span[ptile=2]" # how many tasks per node (up to 8)
5 #BSUB -n 1 # number of total tasks
6 #BSUB -o WPS.out # output filename (%J to add job id)
7 #BSUB -e WPS.err # error filename
8 #BSUB -J WPS.test # job name
9 #BSUB -q share # queue
10 #BSUB -W 3:00 # wallclock time
# On AIX (bluesky/bluevista-era NCAR machines) the test must run out of a
# specific /ptmp working directory; bail out early if it is absent.
12 ######## CHANGE THIS DIRECTORY #######
13 if ( `uname` == AIX ) then
14 if ( -d /ptmp/gill/WPS_reg ) then
15 cd /ptmp/gill/WPS_reg
16 else
17 echo "/ptmp/gill/WPS_reg does not exist - stopping"
18 exit 1
19 endif
20 endif
21 ######## CHANGE THIS DIRECTORY #######
# Strip any user aliases (e.g. "cp -i", "rm -i") so the bulk file
# operations below run without interactive prompts.
23 unalias cp rm ls
24 unalias popd
# Sanity check: this script must be launched from a directory that
# contains both the WPS and WRFV2 source trees side by side.
26 if ( ( ! -d WPS ) || ( ! -d WRFV2 ) ) then
27 clear
28 echo " "
29 echo " "
30 echo "This test is run from a directory where"
31 echo "both WPS and WRFV2 exist"
32 echo " "
33 echo " "
34 exit ( 1 )
35 endif
# NUM_FRAMES: number of CGM frames expected per RIP plot file (used by
# the med/ctrans/convert loops near the end of the script).
37 set NUM_FRAMES = 12
38 set TOP_DIR = `pwd`
# PLOTS_ONLY is assigned twice; in csh the last assignment wins, so the
# effective value is FALSE (full build + run). Flip the second line to
# TRUE to skip the builds and model runs and only regenerate plots.
39 set PLOTS_ONLY = TRUE
40 set PLOTS_ONLY = FALSE
42 # WRFV2 build
44 clear
46 if ( $PLOTS_ONLY == FALSE ) then
# Announce the build with a per-platform time estimate.
48 if ( `uname` == Linux ) then
49 echo 1. starting WRFV2 build pgf90 - takes about 7 minutes
50 else if ( `uname` == AIX ) then
51 echo "1. starting WRFV2 build - takes 20 (bs) to 35 (bv) minutes"
52 else if ( `uname` == Darwin ) then
53 echo "1. starting WRFV2 build g95 - takes 30 minutes"
54 else
55 echo 1. starting WRFV2 build
56 endif
57 echo " start: " `date`
58 pushd WRFV2 >& /dev/null
60 # We at least want to be able to do nesting.
# Configure WRFV2 non-interactively: pipe/here-doc the option number
# that selects a nesting-capable serial build for each platform.
# NOTE(review): the here-doc bodies below (the option selections for
# AIX and Darwin, original lines 66-67 and 71-72) are missing from this
# copy of the file — confirm against the original script.
62 if ( `uname` == Linux ) then
63 echo 2 | ./configure >& /dev/null
64 else if ( `uname` == AIX ) then
65 ./configure << EOF >& /dev/null
68 EOF
69 else if ( `uname` == Darwin ) then
70 ./configure << EOF >& /dev/null
73 EOF
74 else
75 echo need info on this `uname` arch
76 exit
77 endif
# Build the real-data core; success is judged by the two executables
# existing afterwards, not by compile's exit status.
79 ./compile em_real >&! build.log
80 if ( ( -e main/wrf.exe ) && \
81 ( -e main/real.exe ) ) then
82 echo " WRFV2 build OK"
83 else
84 echo " "
85 echo " "
86 echo "WRFV2 build failed"
87 echo "Look at $TOP_DIR/WRFV2/build.log"
88 echo " "
89 echo " "
90 exit ( 2 )
91 endif
92 echo " end: " `date`
93 popd >& /dev/null
95 # WPS build
97 echo " "
98 echo 2. starting WPS build - takes about 1 minute
99 echo " start: " `date`
100 pushd WPS >& /dev/null
# Same pattern as WRFV2: configure with canned option 2, then check
# that all three WPS executables were produced.
101 echo 2 | ./configure >& /dev/null
102 ./compile wps >&! build.log
103 if ( ( -e geogrid.exe ) && \
104 ( -e metgrid.exe ) && \
105 ( -e ungrib.exe ) ) then
106 echo " WPS build OK"
107 else
108 echo " "
109 echo " "
110 echo "WPS build failed"
111 echo "Look at $TOP_DIR/WPS/build.log"
112 echo " "
113 echo " "
114 exit ( 3 )
115 endif
116 echo " end: " `date`
117 popd >& /dev/null
119 endif
121 # WPS TESTS
123 echo " "
124 echo 3. starting WPS tests
125 echo " start: " `date`
126 pushd WPS >& /dev/null
# tcount numbers the tests in the progress output below.
127 set tcount = 0
129 # Loop over all of the tests
# With no command-line arguments, run every case found in test_suite/;
# otherwise run only the test names given on the command line.
131 if ( ${#argv} == 0 ) then
132 set all_tests = ( `ls -1 test_suite` )
133 else
134 set all_tests = ( $* )
135 endif
# Main per-test loop. Each test_suite/<test> dir supplies an env file
# (defining $share, $geogrid, $ungrib, $metgrid) and a namelist.wps.
136 foreach test_num ( $all_tests )
138 # Set the env from the helpful env file
140 source test_suite/${test_num}/the_env
142 # Copy in the WPS namelist.
144 cp test_suite/${test_num}/namelist.wps .
146 # Tell folks what we are doing.
148 @ tcount ++
149 echo " Test #${tcount}"
150 echo " share: " $share
151 echo " geogrid: " $geogrid
152 echo " ungrib: " $ungrib
153 echo " metgrid: " $metgrid
155 # Set the path to the static geog data in the namelist.
# Probe known machine-specific locations for the static geography
# data; write the matching namelist line into temp file "foodir".
157 if ( -d /bouleau/users/duda/GEOG ) then
158 # Michael is a no-op
159 else if ( -d /standalone/users/gill/DATA/GEOG ) then
160 echo ' geog_data_path = "/standalone/users/gill/DATA/GEOG"' >! foodir
161 else if ( -d /data3/mp/wrfhelp/WPS_GEOG ) then
162 echo ' geog_data_path = "/data3/mp/wrfhelp/WPS_GEOG"' >! foodir
163 else if ( -d /mmm/users/wrfhelp/WPS_GEOG ) then
164 echo ' geog_data_path = "/mmm/users/wrfhelp/WPS_GEOG"' >! foodir
165 else
166 echo " "
167 echo " "
168 echo "Could not find the static data for geogrid"
169 echo " "
170 echo " "
171 exit ( 4 )
172 endif
# Two-pass edit: delete any existing geog_data_path line, then splice
# the machine-specific one (from foodir) in after the stand_lon line.
173 sed -e '/geog_data_path/d' namelist.wps >! .foo
174 sed -e '/stand_lon/r foodir' .foo >! namelist.wps
176 # Clean up the old temp file we made.
178 rm .foo
# Run geogrid once per test (it depends only on the domain, not on the
# met data source); skipped entirely in plots-only mode.
180 if ( $PLOTS_ONLY == FALSE ) then
181 # Echo what is happening for each program, also there is
182 # start/ending couplet so that approx timings are possible.
184 echo " geogrid.exe share=${share} geogrid=$geogrid"
185 echo " start: " `date`
186 geogrid.exe >&! geogrid.print.share=${share}.geogrid=$geogrid
# Success is detected by grepping the captured log for "success";
# $status is the exit code of that grep (0 = string found).
187 grep -i success geogrid.print.share=${share}.geogrid=$geogrid >& /dev/null
188 set ok = $status
189 if ( $ok != 0 ) then
190 echo " "
191 echo " "
192 echo "Failed to run geogrid.exe"
193 echo " "
194 echo " "
195 exit ( 5 )
196 endif
197 echo " end: " `date`
198 endif
200 # Now we slide over to the ungrib and metgrid programs. We need to get the
201 # location of the met data. This is not a nml var, but an argument
202 # to a script.
# Probe known machine-specific locations for the regression met data.
204 if ( -d /standalone/users/gill/DATA/WPS_regression_data ) then
205 set datadir = /standalone/users/gill/DATA/WPS_regression_data
206 else if ( -d /stink/gill/Regression_Tests/WPS_regression_data ) then
207 set datadir = /stink/gill/Regression_Tests/WPS_regression_data
208 else if ( -d /data3a/mp/gill/DATA/WPS_regression_data ) then
209 set datadir = /data3a/mp/gill/DATA/WPS_regression_data
210 else if ( -d /mmm/users/gill/DATA/WPS_regression_data ) then
211 set datadir = /mmm/users/gill/DATA/WPS_regression_data
212 else
213 echo " "
214 echo " "
215 echo "Could not find the met data for ungrib.exe"
216 echo " "
217 echo " "
218 exit ( 6 )
219 endif
221 # List of all of the input data (met) files. The g1/g2 means Grib Edition
222 # 1 vs Edition 2. Each has the associated Vtable, where the difference
223 # between dir and the Vtable name is the extra "_g1" or "_g2".
225 set source = ( `ls -1 $datadir/$test_num/DATA` )
227 # Loop over the data sources, one run each of ungrib and one of metgrid.
229 foreach data_source ( $source )
231 # The sources are just directories in the TEST_001, etc dirs. Also
232 # in there is a namelist file. That is not a valid source of data,
233 # so we skip it and move on.
# goto jumps past the whole processing body to the label near the
# bottom of the loop when the entry is not a data directory.
235 if ( ( $data_source == namelist.input ) || ( $data_source == rip_test.in ) ) then
236 goto skipped_namelist_as_a_directory
237 endif
239 # The incremented counter, to keep track of which data source
240 # we are running, and so that we are using the right Vtable.
# Directory names look like "<Vtable>_g1" etc.; the text before the
# first "_" selects which Vtable to copy in.
242 set Vtable = `echo $data_source | cut -d "_" -f1`
243 cp ungrib/Variable_Tables/Vtable.$Vtable Vtable
# link_grib.csh creates the GRIBFILE.* symlinks ungrib reads.
244 ./link_grib.csh $datadir/$test_num/DATA/$data_source/*
# Full pipeline for this data source: ungrib -> metgrid -> real -> wrf.
# Skipped entirely in plots-only mode (the saved TEMPORARY_STORAGE
# output from a previous run is reused instead).
246 if ( $PLOTS_ONLY == FALSE ) then
248 # Run ungrib, the grib decoder.
250 echo " ungrib.exe share=${share} ungrib=$ungrib source=$data_source"
251 echo " start: " `date`
252 ungrib.exe >&! ungrib.print.share=${share}.ungrib=${ungrib}.source=$data_source
# As with geogrid: success == the word "success" appears in the log.
# (The commented Bandimere line is a leftover for forcing a failure.)
253 grep -i success ungrib.print.share=${share}.ungrib=$ungrib.source=$data_source >& /dev/null
254 # grep -i Bandimere ungrib.print.share=${share}.ungrib=$ungrib.source=$data_source >& /dev/null
255 set ok = $status
256 if ( $ok != 0 ) then
257 echo " "
258 echo " "
259 echo "Failed to run ungrib.exe"
260 echo " "
261 echo " "
262 exit ( 6 )
263 endif
264 echo " end: " `date`
266 # Now with the geogrid and ungrib progs done, we can move to metgrid.
268 echo " metgrid.exe share=${share} metgrid=${metgrid} source=$data_source"
269 echo " start: " `date`
270 metgrid.exe >&! metgrid.print.share=${share}.metgrid=${metgrid}.source=$data_source
271 grep -i success metgrid.print.share=${share}.metgrid=${metgrid}.source=$data_source >& /dev/null
272 set ok = $status
273 if ( $ok != 0 ) then
274 echo " "
275 echo " "
276 echo "Failed to run metgrid.exe"
277 echo " "
278 echo " "
279 exit ( 7 )
280 endif
281 echo " end: " `date`
283 # Save the data.
# Stash the intermediate (FILE*) and metgrid (met_*) output per
# test+source, wiping any results from a previous run first.
285 if ( ! -d TEMPORARY_STORAGE ) then
286 mkdir TEMPORARY_STORAGE
287 endif
288 if ( -d TEMPORARY_STORAGE/${test_num}.source=${data_source} ) then
289 rm -rf TEMPORARY_STORAGE/${test_num}.source=${data_source}
290 endif
291 mkdir TEMPORARY_STORAGE/${test_num}.source=${data_source}
292 mv FILE* TEMPORARY_STORAGE/${test_num}.source=${data_source}
293 mv met_* TEMPORARY_STORAGE/${test_num}.source=${data_source}
295 # ## Run the real/wrf combo on this data generated. ##
297 # Get to the WRF dir, just up and over a bit.
299 pushd ../WRFV2/test/em_real >& /dev/null
301 # We need the data we just made from metgrid to be the input for real.
303 ln -sf ../../../WPS/TEMPORARY_STORAGE/${test_num}.source=${data_source}/met_* .
305 # Manufacture the namelist. A template is in the data dir, just edit the
306 # number of metgrid levels.
# Pull num_metgrid_levels out of the d02 metgrid file header via
# ncdump, then let m4 substitute it into the namelist template.
308 cp $datadir/$test_num/namelist.input namelist.input.template
309 set NUM_METGRID_LEVELS = `ncdump -h met_em.d02.* | grep -i num_metgrid_levels | grep = | awk '{print $3}'`
310 m4 -DNUM_METGRID_LEVELS=${NUM_METGRID_LEVELS} namelist.input.template >! namelist.input
312 # The real portion.
314 echo " real.exe share=${share} metgrid=${metgrid} source=$data_source"
315 echo " start: " `date`
316 real.exe >&! real.print.share=${share}.metgrid=${metgrid}.source=$data_source
317 grep -i success real.print.share=${share}.metgrid=${metgrid}.source=$data_source >& /dev/null
318 set ok = $status
319 if ( $ok != 0 ) then
320 echo " "
321 echo " "
322 echo "Failed to run real.exe"
323 echo " "
324 echo " "
325 exit ( 8 )
326 endif
327 echo " end: " `date`
329 # The wrf portion.
331 echo " wrf.exe share=${share} metgrid=${metgrid} source=$data_source"
332 echo " start: " `date`
333 wrf.exe >&! wrf.print.share=${share}.metgrid=${metgrid}.source=$data_source
334 grep -i success wrf.print.share=${share}.metgrid=${metgrid}.source=$data_source >& /dev/null
335 set ok = $status
336 if ( $ok != 0 ) then
337 echo " "
338 echo " "
339 echo "Failed to run wrf.exe"
340 echo " "
341 echo " "
342 exit ( 9 )
343 endif
344 echo " end: " `date`
346 # Save the model IC, BC and forecast data.
# Same save pattern as above, but this TEMPORARY_STORAGE lives in the
# WRFV2/test/em_real directory (we pushd'ed there), so it does not
# collide with the WPS-side directory of the same name.
348 if ( ! -d TEMPORARY_STORAGE ) then
349 mkdir TEMPORARY_STORAGE
350 endif
351 if ( -d TEMPORARY_STORAGE/${test_num}.source=${data_source} ) then
352 rm -rf TEMPORARY_STORAGE/${test_num}.source=${data_source}
353 endif
354 mkdir TEMPORARY_STORAGE/${test_num}.source=${data_source}
355 mv wrfi* wrfb* wrfo* namelist.input TEMPORARY_STORAGE/${test_num}.source=${data_source}
# Drop the met_em symlinks made above; the real files stay saved.
356 rm met_*
358 # Get back out to the WPS dir.
360 popd >& /dev/null
362 endif
364 #######
# Plotting stage: always runs (even in plots-only mode), working in the
# WRFV2/test/em_real dir where the wrfout files were saved.
366 pushd ../WRFV2/test/em_real >& /dev/null
368 # Handle the plots with RIP.
370 echo " ripdp_wrfarw and rip share=${share} metgrid=${metgrid} source=$data_source"
371 echo " start: " `date`
372 if ( ! -d RIP ) then
373 mkdir RIP
374 endif
# Clear any RIP data files left from a previous run of this case.
375 rm -rf RIP/${test_num}_${data_source}* >& /dev/null
376 ripdp_wrfarw RIP/${test_num}_${data_source} \
377 all \
378 TEMPORARY_STORAGE/${test_num}.source=${data_source}/wrfo* >&! \
379 ripdp_wrfarw.print.share=${share}.metgrid=${metgrid}.source=$data_source
# NOTE(review): success is detected by grepping for "vladimir" —
# presumably a marker string RIP prints on completion; confirm.
380 grep -i vladimir ripdp_wrfarw.print.share=${share}.metgrid=${metgrid}.source=$data_source >& /dev/null
381 set ok = $status
382 if ( $ok != 0 ) then
383 echo " "
384 echo " "
385 echo "Failed to run ripdp_wrfarw"
386 echo " "
387 echo " "
388 exit ( 10 )
389 endif
391 # ... and RIP.
393 cp ${datadir}/${test_num}/rip_test.in test.in
394 rip RIP/${test_num}_${data_source} test.in >&! \
395 rip.print.share=${share}.metgrid=${metgrid}.source=$data_source
396 grep -i vladimir rip.print.share=${share}.metgrid=${metgrid}.source=$data_source >& /dev/null
397 set ok = $status
398 if ( $ok != 0 ) then
399 echo " "
400 echo " "
401 echo "Failed to run rip"
402 echo " "
403 echo " "
404 exit ( 11 )
405 endif
407 # Split plots into CGM pieces.
# Build a command file ("med.info") telling med to read test.cgm and
# write each frame to its own plot<NN>.cgm; indexes are zero-padded
# to two digits so filenames sort correctly.
409 if ( -e med.info ) then
410 rm med.info
411 endif
412 touch med.info
414 set count = 0
416 echo "read test.cgm" >> med.info
417 while ( $count < $NUM_FRAMES )
418 @ count ++
419 if ( $count < 10 ) then
420 set index = 0$count
421 else
422 set index = $count
423 endif
424 echo "${index} , ${index} w plot${index}.cgm" >> med.info
# NOTE(review): the "end" closing this while loop (original line 425)
# is missing from this copy of the file — confirm against the original.
426 echo "quit" >> med.info
428 # Run med to split frames.
430 med -f med.info >& /dev/null
432 # Convert to a more traditional form, we like gif right now.
# CGM -> Sun raster (ctrans) -> GIF (convert), plus a 10%-size
# thumbnail for the web page.
434 set count = 0
435 while ( $count < $NUM_FRAMES )
436 @ count ++
437 if ( $count < 10 ) then
438 set index = 0$count
439 else
440 set index = $count
441 endif
442 ctrans -d sun plot${index}.cgm > plot${index}.ras
443 convert plot${index}.ras ${test_num}_${data_source}_${index}.gif
444 convert ${test_num}_${data_source}_${index}.gif -resize 10% \
445 ${test_num}_${data_source}_${index}_small.gif
# NOTE(review): the "end" closing the while loop above (original lines
# 446-447) is missing from this copy — confirm against the original.
448 mv test.cgm plot*.cgm plot*.ras *.gif \
449 TEMPORARY_STORAGE/${test_num}.source=${data_source}
450 rm med.info
452 # Build singleton web pages for each image
# One tiny HTML wrapper per frame; the here-doc expands the csh
# variables into the img src. NOTE(review): the EOF terminator and the
# loop "end" (original lines 468-470) are missing from this copy —
# confirm against the original before editing near here.
454 set count = 0
455 while ( $count < $NUM_FRAMES )
456 @ count ++
457 if ( $count < 10 ) then
458 set index = 0$count
459 else
460 set index = $count
461 endif
462 cat >&! TEMPORARY_STORAGE/${test_num}.source=${data_source}/plot_${index}.html << EOF
463 <HTML>
464 <BODY>
465 <img src="${test_num}_${data_source}_${index}.gif">
466 </BODY>
467 </HTML>
471 popd >& /dev/null
473 # Put the pre-built web page on top of the WRF fcst plots.
475 if ( ! -e ../WRFV2/test/em_real/TEMPORARY_STORAGE/wps_reg.html ) then
476 cp util/wps_reg.html ../WRFV2/test/em_real/TEMPORARY_STORAGE
477 endif
478 echo " end: " `date`
480 #########
# goto target: non-data directory entries (namelist.input, rip_test.in)
# jump straight here, skipping all per-source processing above.
481 skipped_namelist_as_a_directory:
485 # Save the static data for this location.
# Stash the geogrid output (geo_*) per test; it is shared by all data
# sources of the test, hence saved once, outside the source loop.
487 if ( $PLOTS_ONLY == FALSE ) then
488 if ( -d TEMPORARY_STORAGE/${test_num}.location ) then
489 rm -rf TEMPORARY_STORAGE/${test_num}.location
490 endif
491 mkdir TEMPORARY_STORAGE/${test_num}.location
492 mv geo_* TEMPORARY_STORAGE/${test_num}.location
493 endif
# NOTE(review): the "end" statements closing the data-source and test
# foreach loops (original lines ~482-484 and ~494) are missing from
# this copy of the file — confirm against the original script.
495 echo " end: " `date`
496 popd >& /dev/null