!----------------------------------------------------------------------
! Purpose: Scatter global hires. control variables to different PEs
! Input  : vp_hires.bin -- high-resolution global control variables
! Output : vp_XXXX      -- high-resolution local control variables
! To preserve the domain extent, the hires grid number must equal
!    ( n - 1 )*ratio + 1
!    where n is the grid number in x or y of the coarse grid
!    and ratio is the refinement ratio between the two resolutions
! liuz@ucar.edu , 2016-05, NCAR/MMM
!----------------------------------------------------------------------
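! Example (hypothetical numbers): with a refinement ratio of 3 and a
! coarse grid of n = 100 points in x, the hires domain must have
! ( 100 - 1 )*3 + 1 = 298 points in x to span the same extent.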
program da_vp_split

   implicit none

   include 'mpif.h'

   integer :: i, j, k, n, status
   integer :: ix, jy, kz                  ! local (patch) dimensions
   integer :: io_status, size, ierror
   INTEGER :: ntasks_x, ntasks_y, mytask, mytask_x, mytask_y
   INTEGER :: new_local_comm, local_communicator
   INTEGER, DIMENSION(2) :: dims, coords
   LOGICAL, DIMENSION(2) :: isperiodic
   INTEGER :: ids, ide, jds, jde, kds, kde, &
              ips, ipe, jps, jpe, kps, kpe

   character (len=255) :: vp_hires
   character (len=255) :: arg = ""
   integer, parameter :: vp_unit = 8
   real, dimension(:,:,:), allocatable :: v1, v2, v3, v4, v5
   real, dimension(:,:,:), allocatable :: v6, v7, v8, v9, v10, v11
   real, dimension(:,:,:), allocatable :: v1l, v2l, v3l, v4l, v5l
   real, dimension(:,:,:), allocatable :: v6l, v7l, v8l, v9l, v10l, v11l

   integer :: cloud_cv_options  ! 2 or 3 with cloud cv variables
   integer :: use_cv_w          ! =1 for w control variable

   LOGICAL :: file_exists
   !------------------------------
   ! read program arguments
   !------------------------------
   call get_command_argument(1, arg)
   read(arg, '(i3)') cloud_cv_options
   call get_command_argument(2, arg)
   read(arg, '(i3)') use_cv_w
   write (*, *) 'cloud_cv_options = ', cloud_cv_options, &
                'use_cv_w = ', use_cv_w
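   ! Assumed usage (not from the original source): the two integer flags are
   ! passed on the command line, e.g.
   !    mpirun -np 4 ./da_vp_split.exe 3 1
   ! for cloud_cv_options = 3 and use_cv_w = 1.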
   !---------------------------------------------------------------------
   ! MPI initialization and 2-D processor mesh
   !---------------------------------------------------------------------
   call MPI_INIT(ierror)
   call MPI_COMM_SIZE(MPI_COMM_WORLD, size, ierror)
   call MPI_COMM_RANK(MPI_COMM_WORLD, mytask, ierror)

   call MPASPECT( size, ntasks_x, ntasks_y, 1, 1 )
   if ( mytask == 0 ) WRITE( * , * )'Ntasks in X ',ntasks_x,', ntasks in Y ',ntasks_y
   new_local_comm = MPI_COMM_WORLD
   dims(1) = ntasks_y  ! rows
   dims(2) = ntasks_x  ! columns
   isperiodic(1) = .false.
   isperiodic(2) = .false.
   CALL mpi_cart_create( new_local_comm, 2, dims, isperiodic, .false., &
                         local_communicator, ierror )
   CALL mpi_comm_rank( local_communicator, mytask, ierror )
   CALL mpi_cart_coords( local_communicator, mytask, 2, coords, ierror )
   mytask_x = coords(2)  ! col task (x)
   mytask_y = coords(1)  ! row task (y)
   !write (*,*) "The coords of task ",mytask, " is ",mytask_x,mytask_y
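   ! Example (hypothetical run): with 4 MPI tasks, MPASPECT returns a 2 x 2
   ! mesh, so dims = (/2,2/); MPI_CART_COORDS then gives task 3 the
   ! coordinates (1,1), i.e. mytask_x = 1 and mytask_y = 1.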
   vp_hires = 'vp_output.global_hires'
   inquire(FILE=trim(vp_hires), EXIST=file_exists)

   if ( .not. file_exists ) then
      write(*,*) "Error: "//trim(vp_hires)//" does not exist"
      stop
   end if

   open(unit=vp_unit, file=trim(vp_hires), iostat=io_status, form='UNFORMATTED', status='OLD')
   if (io_status /= 0) then
      write(*,*) "Error ", io_status, " opening vp file "//trim(vp_hires)
      stop
   end if
   if ( mytask == 0 ) write(*,*) 'Reading vp from : '//trim(vp_hires)
   read(vp_unit) ide, jde, kde  ! domain dimensions (unstaggered)
   ide = ide + 1                ! WRF parallel decomposition is based on the staggered grid
   jde = jde + 1
   kde = kde + 1
   ids = 1
   jds = 1
   kds = 1
   if ( mytask == 0 ) write(*,*) 'ide, jde, kde = ', ide, jde, kde
   !---------------------------------------------------------------------
   ! Calculate the domain decomposition
   !---------------------------------------------------------------------
   CALL compute_memory_dims_rsl_lite ( 0 ,                              &
                                       ids, ide, jds, jde, kds, kde,    &
                                       ips, ipe, jps, jpe, kps, kpe )
   ! convert to A-grid and middle levels on which control variables sit
   if ( ipe == ide ) ipe = ipe - 1
   if ( jpe == jde ) jpe = jpe - 1
   if ( kpe == kde ) kpe = kpe - 1
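   ! Example (hypothetical numbers): if the unstaggered x-dimension read from
   ! file is 100, ide becomes 101 (staggered) for the decomposition; a patch
   ! whose ipe falls on the staggered edge (101) is pulled back to 100 so that
   ! the local arrays hold A-grid points only.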
   !WRITE(*,*)'*************************************'
   !WRITE(90,*)'local ',ips,ipe,jps,jpe,kps,kpe
   WRITE(*,*)'local ',ips,ipe,jps,jpe,kps,kpe
   !WRITE(*,*)'*************************************'
   !---------------------------------------------------------------------
   ! allocate global vp variables (unstaggered)
   !---------------------------------------------------------------------
   allocate ( v1(ids:ide-1, jds:jde-1, kds:kde-1) )
   allocate ( v2(ids:ide-1, jds:jde-1, kds:kde-1) )
   allocate ( v3(ids:ide-1, jds:jde-1, kds:kde-1) )
   allocate ( v4(ids:ide-1, jds:jde-1, kds:kde-1) )
   allocate ( v5(ids:ide-1, jds:jde-1, kds:kde-1) )
   if ( cloud_cv_options >= 2 ) then
      allocate ( v6(ids:ide-1, jds:jde-1, kds:kde-1) )
      allocate ( v7(ids:ide-1, jds:jde-1, kds:kde-1) )
      allocate ( v8(ids:ide-1, jds:jde-1, kds:kde-1) )
      allocate ( v9(ids:ide-1, jds:jde-1, kds:kde-1) )
      allocate ( v10(ids:ide-1, jds:jde-1, kds:kde-1) )
   end if

   if ( use_cv_w == 1 ) allocate ( v11(ids:ide-1, jds:jde-1, kds:kde-1) )
   read(vp_unit) v1, v2, v3, v4, v5
   if ( cloud_cv_options >= 2 ) read(vp_unit) v6, v7, v8, v9, v10
   if ( use_cv_w == 1 ) read(vp_unit) v11

   call MPI_BARRIER(MPI_COMM_WORLD, ierror)
   if ( mytask == 0 ) write(*,*) 'Reading vp from : '//trim(vp_hires)//' is completed'
   !---------------------------------------------------------------------
   ! allocate local vp variables (unstaggered)
   !---------------------------------------------------------------------
   ix = ipe - ips + 1   ! local patch sizes
   jy = jpe - jps + 1
   kz = kpe - kps + 1

   allocate ( v1l(1:ix, 1:jy, 1:kz) )
   allocate ( v2l(1:ix, 1:jy, 1:kz) )
   allocate ( v3l(1:ix, 1:jy, 1:kz) )
   allocate ( v4l(1:ix, 1:jy, 1:kz) )
   allocate ( v5l(1:ix, 1:jy, 1:kz) )
   if ( cloud_cv_options >= 2 ) then
      allocate ( v6l(1:ix, 1:jy, 1:kz) )
      allocate ( v7l(1:ix, 1:jy, 1:kz) )
      allocate ( v8l(1:ix, 1:jy, 1:kz) )
      allocate ( v9l(1:ix, 1:jy, 1:kz) )
      allocate ( v10l(1:ix, 1:jy, 1:kz) )
   end if

   if ( use_cv_w == 1 ) allocate ( v11l(1:ix, 1:jy, 1:kz) )
   !---------------------------------------------------------------------
   ! extract the local patch from the global arrays
   !---------------------------------------------------------------------
   v1l(1:ix, 1:jy, 1:kz) = v1(ips:ipe, jps:jpe, kps:kpe)
   v2l(1:ix, 1:jy, 1:kz) = v2(ips:ipe, jps:jpe, kps:kpe)
   v3l(1:ix, 1:jy, 1:kz) = v3(ips:ipe, jps:jpe, kps:kpe)
   v4l(1:ix, 1:jy, 1:kz) = v4(ips:ipe, jps:jpe, kps:kpe)
   v5l(1:ix, 1:jy, 1:kz) = v5(ips:ipe, jps:jpe, kps:kpe)
   if ( cloud_cv_options >= 2 ) then
      v6l(1:ix, 1:jy, 1:kz)  = v6(ips:ipe, jps:jpe, kps:kpe)
      v7l(1:ix, 1:jy, 1:kz)  = v7(ips:ipe, jps:jpe, kps:kpe)
      v8l(1:ix, 1:jy, 1:kz)  = v8(ips:ipe, jps:jpe, kps:kpe)
      v9l(1:ix, 1:jy, 1:kz)  = v9(ips:ipe, jps:jpe, kps:kpe)
      v10l(1:ix, 1:jy, 1:kz) = v10(ips:ipe, jps:jpe, kps:kpe)
   end if

   if ( use_cv_w == 1 ) v11l(1:ix, 1:jy, 1:kz) = v11(ips:ipe, jps:jpe, kps:kpe)
   write (vp_hires, '(A,i4.4)') "vp_input.", mytask

   open(unit=vp_unit, file=trim(vp_hires), iostat=io_status, form='UNFORMATTED', status='UNKNOWN')
   if (io_status /= 0) then
      write(*,*) "Error ", io_status, " opening vp file "//trim(vp_hires)
      stop
   end if

   write(*,*) 'Writing hires vp to : '//trim(vp_hires)
   write(vp_unit) ips, ipe, jps, jpe, kps, kpe
   write(vp_unit) v1l, v2l, v3l, v4l, v5l
   if ( cloud_cv_options >= 2 ) write(vp_unit) v6l, v7l, v8l, v9l, v10l
   if ( use_cv_w == 1 ) write(vp_unit) v11l
   !write(*,*) 'Sample of cvt :',mytask, maxval(cvt), minval(cvt)
   !---------------------------------------------------------------------
   ! deallocate global and local control variables
   !---------------------------------------------------------------------
   !if ( mytask == 0 ) then
   deallocate ( v1, v2, v3, v4, v5 )
   deallocate ( v1l, v2l, v3l, v4l, v5l )
   if ( cloud_cv_options >= 2 ) then
      deallocate ( v6, v7, v8, v9, v10 )
      deallocate ( v6l, v7l, v8l, v9l, v10l )
   end if
   if ( use_cv_w == 1 ) then
      deallocate ( v11 )
      deallocate ( v11l )
   end if
   call MPI_BARRIER(MPI_COMM_WORLD, ierror)
   if ( mytask == 0 ) write(*,*) "Distributing control variables completed successfully"
   call MPI_FINALIZE(ierror)

contains
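   !---------------------------------------------------------------------
   ! MPASPECT factors the total task count P into an ntasks_x * ntasks_y
   ! mesh that is as near to square as the factorization allows, subject
   ! to the PROCMIN_M / PROCMIN_N minimums (e.g. 4 tasks give a 2 x 2 mesh).
   !---------------------------------------------------------------------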
   SUBROUTINE MPASPECT( P, MINM, MINN, PROCMIN_M, PROCMIN_N )
      INTEGER P, M, N, MINI, MINM, MINN, PROCMIN_M, PROCMIN_N, ierror
      ! search all factorizations P = M*N for the one closest to square
      MINI = 2*P
      MINM = 1
      MINN = P
      DO M = 1, P
         IF ( MOD( P, M ) .EQ. 0 ) THEN
            N = P / M
            IF ( ABS(M-N) .LT. MINI      &
                 .AND. M .GE. PROCMIN_M  &
                 .AND. N .GE. PROCMIN_N  &
               ) THEN
               MINI = ABS(M-N)
               MINM = M
               MINN = N
            END IF
         END IF
      END DO
      IF ( MINM .LT. PROCMIN_M .OR. MINN .LT. PROCMIN_N ) THEN
         WRITE( * , * )'MPASPECT: UNABLE TO GENERATE PROCESSOR MESH. STOPPING.'
         WRITE( * , * )' PROCMIN_M ', PROCMIN_M
         WRITE( * , * )' PROCMIN_N ', PROCMIN_N
         WRITE( * , * )' P         ', P
         WRITE( * , * )' MINM      ', MINM
         WRITE( * , * )' MINN      ', MINN
         call MPI_FINALIZE(ierror)
         stop
      END IF

   END SUBROUTINE MPASPECT
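   !---------------------------------------------------------------------
   ! compute_memory_dims_rsl_lite assigns this task its patch of the
   ! staggered domain (ips:ipe, jps:jpe) by asking task_for_point which
   ! task owns each row and column, following the WRF RSL_LITE
   ! decomposition; shw is an optional halo width (0 here).
   !---------------------------------------------------------------------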
   SUBROUTINE compute_memory_dims_rsl_lite ( shw,                           &
                                             ids, ide, jds, jde, kds, kde,  &
                                             ips, ipe, jps, jpe, kps, kpe )

      INTEGER, INTENT(IN)  :: shw
      INTEGER, INTENT(IN)  :: ids, ide, jds, jde, kds, kde
      INTEGER, INTENT(OUT) :: ips, ipe, jps, jpe, kps, kpe

      INTEGER Px, Py, P, i, j, k, ierr
      ! X decomposition: find the first and last i owned by this task's column
      ips = -1
      j = jds
      ierr = 0
      DO i = ids, ide
         CALL task_for_point ( i, j, ids, ide, jds, jde, ntasks_x, ntasks_y, Px, Py, &
                               1, 1, ierr )
         IF ( ierr .NE. 0 ) stop 'error code returned by task_for_point '
         IF ( Px .EQ. mytask_x ) THEN
            ipe = i
            IF ( ips .EQ. -1 ) ips = i
         END IF
      END DO
      ! handle setting the memory dimensions where there are no X elements assigned to this proc
      IF (ips .EQ. -1 ) THEN
         ipe = -1
         ips = 0
      END IF
      ! Y decomposition: find the first and last j owned by this task's row
      jps = -1
      i = ids
      DO j = jds, jde
         CALL task_for_point ( i, j, ids, ide, jds, jde, ntasks_x, ntasks_y, Px, Py, &
                               1, 1, ierr )
         IF ( ierr .NE. 0 ) stop 'error code returned by task_for_point '
         IF ( Py .EQ. mytask_y ) THEN
            jpe = j
            IF ( jps .EQ. -1 ) jps = j
         END IF
      END DO
      ! handle setting the memory dimensions where there are no Y elements assigned to this proc
      IF (jps .EQ. -1 ) THEN
         jpe = -1
         jps = 0
      END IF
      !begin: wig; 12-Mar-2008
      ! This appears redundant with the conditionals above, but we get cases with only
      ! one of the directions being set to "missing" when turning off extra processors.
      ! This may break the handling of setting only one of nproc_x or nproc_y via the namelist.
      IF (ipe .EQ. -1 .or. jpe .EQ. -1) THEN
         ips = 0
         ipe = -1
         jps = 0
         jpe = -1
      END IF
      !end: wig; 12-Mar-2008
      ! extend the patch dimensions out shw along edges of domain
      IF ( ips < ipe .and. jps < jpe ) THEN           !wig; 11-Mar-2008
         IF ( mytask_x .EQ. 0 ) THEN
            ips = ips - shw
         END IF
         IF ( mytask_x .EQ. ntasks_x-1 ) THEN
            ipe = ipe + shw
         END IF
         IF ( mytask_y .EQ. 0 ) THEN
            jps = jps - shw
         END IF
         IF ( mytask_y .EQ. ntasks_y-1 ) THEN
            jpe = jpe + shw
         END IF
      END IF                                          !wig; 11-Mar-2008

      ! vertical patch covers the full column
      kps = kds
      kpe = kde

   END SUBROUTINE compute_memory_dims_rsl_lite
end program da_vp_split