1 ! MPI functions for lagrangian transport in Module Firebrand Spotting
3 !-------------------------------------------------------------------------------
5 !-------------------------------------------------------------------------------
7 ! Capitals vs. lower-case: In FORTRAN words use CAPITAL case
9 ! Operators: use >, <, >=, <=, == (instead of .GT., .EQ., .GE. etc)
10 ! Ordering of DO loops for fast memory access:
11 ! The innermost loop goes over the 1st array dimension (fastest changing dimension)
12 ! do j = 1, Jend; do k = 1, Kend; do i = 1, Iend; A(i, k, j); end do; end do; end do
13 !-------------------------------------------------------------------------------
15 MODULE module_firebrand_spotting_mpi
17 USE module_domain, ONLY : get_ijk_from_grid, domain ! grid
18 USE module_configure, ONLY : grid_config_rec_type ! config_flags
19 !USE module_symbols_util, ONLY : WRFU_TimeInterval, WRFU_TimeIntervalGet, WRFU_TimeIntervalSet
20 #if ( defined(DM_PARALLEL) && ! defined(STUBMPI) )
27 #if ( defined(DM_PARALLEL) && ! defined(STUBMPI) )
30 neighbors, my_id, task_id, mpiprocs, &
31 left_id, right_id, up_id, down_id, &
32 upleft_id, upright_id, downleft_id, downright_id, &
34 fs_mpi_recvfrompatch_real, &
35 fs_mpi_recvfrompatch_int, &
36 fs_mpi_recvbuffsize, &
37 fs_mpi_recvbuff1_real, &
38 fs_mpi_recvbuff1_int, &
39 fs_mpi_checkreceive, &
41 fs_mpi_send2neighbors, & ! (task_id, mask, p_x, p_y, p_z, fs_p_m, fs_p_d, fs_p_e, fs_p_t, fs_p_v, p_id, p_dt)
43 fs_mpi_nothing2send, &
44 fs_mpi_recv, & ! (np_id, task_id, r_x, r_y, r_z, r_p_m, r_p_d, r_p_e, r_p_t, r_p_v, r_id, r_dt)
45 fs_mpi_sendbuff_real, &
46 fs_mpi_sendbuff_int, &
47 fs_mpi_sendbuffsize, &
48 fs_mpi_sendbuff1_real, &
52 ! THESE VARIABLES ARE IN MODULE SCOPE ! Careful with reassignments - don't reassign
53 ! SAVE attribute is default
55 !-------------------------------------------------------------------------------
56 ! variables in module scope: private, only available module-wide (host association)
57 !-------------------------------------------------------------------------------
! They should not be declared again in subroutines (may not compile)
59 ! and must not be among routine dummy arguments. Consequently, cannot be IO variables
61 ! Runtime variables are not available at module level (e.g., namelist, tile dependent variables).
62 ! Include here only what can be set during compilation:
! fixed parameters, allocatables, declarations (without initialization)
65 !-------------------------------------------------------------------------------
66 ! Fixed parameters ***MODULE SCOPE***
67 !-------------------------------------------------------------------------------
69 #if ( defined(DM_PARALLEL) && ! defined(STUBMPI) )
70 INTEGER, PARAMETER :: wrfdbg = 0
72 INTEGER, PARAMETER :: dp = KIND(0.d0) ! double precision
73 REAL, PARAMETER :: ZERO_dp = 0.0_dp ! this is a real type variable, not a double precision type
74 REAL, PARAMETER :: dp05 = 0.5_dp
75 REAL, PARAMETER :: dp1 = 1.0_dp
77 !-------------------------------------------------------------------------------
78 ! Generic variables for multiple use within module ***MODULE SCOPE***
79 !-------------------------------------------------------------------------------
81 CHARACTER (LEN=200), SAVE :: msg
82 CHARACTER (LEN=256), SAVE :: fmt
83 CHARACTER (LEN=200), DIMENSION(10) :: amsg
84 INTEGER, SAVE :: imsg ! loop counters
86 !-------------------------------------------------------------------------------
87 ! MPI variables - Move particles between tiles - ***MODULE SCOPE***
88 !-------------------------------------------------------------------------------
90 INTEGER, PARAMETER :: neighbors = 8 ! number of neighbor tasks - includes diagonals
91 INTEGER, PARAMETER :: mpi_datapack_nreal = 8 ! number of real type arrays packed together: 3 (xyz) + 5 (p_properties)
92 INTEGER, PARAMETER :: mpi_datapack_nint = 3 ! number of integer type arrays packed together 2 (id, src, dt)
94 INTEGER, SAVE :: my_id, left_id, right_id, up_id, down_id
95 INTEGER, SAVE :: upleft_id, upright_id, downleft_id, downright_id
96 INTEGER, SAVE, DIMENSION(neighbors) :: task_id
98 INTEGER, SAVE :: mpiprocs = 0
99 !-------------------------------------------------------------------------------
100 ! grid and cf are not here because dimensions are given at runtime (derived types)
101 ! grid values change at every interaction,
102 ! therefore, it needs to be a dummy argument
103 !-------------------------------------------------------------------------------
105 !-------------------------------------------------------------------------------
106 ! Variable bounds - Initialized in init, used in dummy arguments in driver
108 !-------------------------------------------------------------------------------
109 INTEGER, SAVE :: ids, jds, ide, jde, kde ! domain bounds
110 INTEGER, SAVE :: ims, jms, ime, jme, kms, kme ! memory bounds
111 INTEGER, SAVE :: is, ie, js, je, ks, ke ! patch start/end
112 INTEGER, SAVE :: ifps, jfps, ifpe, jfpe ! refined fire grid bounds
116 #if ( defined(DM_PARALLEL) && ! defined(STUBMPI) )
117 !=============================================================
118 !=============================================================
121 !******************************************************************
122 !******************************************************************
126 !******************************************************************
127 !******************************************************************
129 !=============================================================
FUNCTION fs_mpi_recvfrompatch_real(bsz, fromid) RESULT (buff)
!=============================================================

    !-------------------------------------------------------------------------------
    ! Blocking receive of a packed REAL array of size bsz from MPI rank fromid.
    ! recvtag = 2000 + fromid pairs with the sender side (fs_mpi_sendbuff_real
    ! uses tag = 2000 + my_id), so tags match by construction.
    !-------------------------------------------------------------------------------

    INTEGER, INTENT(IN) :: bsz, fromid       ! buffer length and source rank
    REAL, DIMENSION(bsz) :: buff ! p_x, p_y, p_z
    INTEGER :: ierr, recvtag, ii             ! ii is unused in this routine
    INTEGER :: stat(MPI_STATUS_SIZE)

    recvtag = 2000 + fromid ! 2000: tag real type data
    ! NOTE(review): MPI_FLOAT is the C binding's datatype; MPI_REAL is the
    ! Fortran-correct one for a default REAL buffer. Both sides of this exchange
    ! use MPI_FLOAT consistently, but portability should be confirmed.
    CALL mpi_recv(buff, bsz, MPI_FLOAT, fromid, recvtag, MPI_COMM_WORLD, stat, ierr)

END FUNCTION fs_mpi_recvfrompatch_real
149 !=============================================================
150 !=============================================================
153 !=============================================================
FUNCTION fs_mpi_recvfrompatch_int(bsz, fromid) RESULT (buff)
!=============================================================

    !-------------------------------------------------------------------------------
    ! Blocking receive of a packed INTEGER array of size bsz from MPI rank fromid.
    ! recvtag = 3000 + fromid pairs with the sender side (fs_mpi_sendbuff_int
    ! uses tag = 3000 + my_id), so tags match by construction.
    !-------------------------------------------------------------------------------

    INTEGER, INTENT(IN) :: bsz, fromid       ! buffer length and source rank
    INTEGER, DIMENSION(bsz) :: buff          ! packed p_id / p_src / p_dt data
    INTEGER :: ierr, recvtag, ii             ! ii is unused in this routine
    INTEGER :: stat(MPI_STATUS_SIZE)

    recvtag = 3000 + fromid ! 3000: tag int type data
    CALL mpi_recv(buff, bsz, MPI_INTEGER, fromid, recvtag, MPI_COMM_WORLD, stat, ierr)

END FUNCTION fs_mpi_recvfrompatch_int
172 !=============================================================
173 !=============================================================
176 !=============================================================
FUNCTION fs_mpi_recvbuffsize(fromid) RESULT(recvbuffsz)
!=============================================================

    !-------------------------------------------------------------------------------
    ! Receive the incoming-buffer size (zero or the particle count nbr) from rank
    ! fromid. Blocking: the sender always transmits a count, either through
    ! fs_mpi_sendbuffsize or fs_mpi_nothing2send (both use tag 1000 + my_id,
    ! matching recvtag = 1000 + fromid here).
    ! Returns 0 without communicating when fromid == -1 (no neighbor on that side),
    ! so the result is always defined.
    !-------------------------------------------------------------------------------

    INTEGER, INTENT(IN) :: fromid
    INTEGER :: recvbuffsz
    INTEGER :: ierr, recvtag, sz, tag
    INTEGER :: stat(MPI_STATUS_SIZE)

    sz = 1        ! one value corresponding to nbr (must send a number, receive is blocking)
    tag = 1000    ! tag family for communicating nbr
    recvtag = tag + fromid

    recvbuffsz = 0 ! well-defined result when there is no neighbor to hear from

    !-------------------------------------------------------------------------------
    IF (fromid > -1) THEN
        CALL mpi_recv(recvbuffsz, sz, MPI_INTEGER, fromid, recvtag, MPI_COMM_WORLD, stat, ierr)
    END IF

END FUNCTION fs_mpi_recvbuffsize
203 !=============================================================
204 !=============================================================
206 !=============================================================
FUNCTION fs_mpi_recvbuff1_real(fromid) RESULT(recvbuffsz)
!=============================================================

    !-------------------------------------------------------------------------------
    ! Blocking receive of a single REAL scalar from rank fromid.
    ! recvtag = 4000 + fromid pairs with fs_mpi_sendbuff1_real (tag 4000 + my_id).
    !-------------------------------------------------------------------------------

    INTEGER, INTENT(IN) :: fromid
    REAL :: recvbuffsz            ! result; declaration was missing (required under IMPLICIT NONE)
    INTEGER :: ierr, recvtag, sz, tag
    INTEGER :: stat(MPI_STATUS_SIZE)

    sz = 1     ! one value corresponding to nbr (must send a number, receive is blocking)
    tag = 4000 ! tag family for real scalars
    recvtag = tag + fromid

    !-------------------------------------------------------------------------------
    !IF (fromid > -1) THEN  ! NOTE(review): guard disabled in original; fromid == -1 would address no rank
    CALL mpi_recv(recvbuffsz, sz, MPI_FLOAT, fromid, recvtag, MPI_COMM_WORLD, stat, ierr)

END FUNCTION fs_mpi_recvbuff1_real
233 !=============================================================
234 !=============================================================
237 !=============================================================
FUNCTION fs_mpi_recvbuff1_int(fromid) RESULT(recvbuffsz)
!=============================================================

    !-------------------------------------------------------------------------------
    ! Blocking receive of a single INTEGER scalar from rank fromid.
    ! (Original header said "real type scalar" - this routine receives an INTEGER.)
    ! recvtag = 5000 + fromid pairs with fs_mpi_sendbuff1_int (tag 5000 + my_id).
    !-------------------------------------------------------------------------------

    INTEGER, INTENT(IN) :: fromid
    INTEGER :: recvbuffsz
    INTEGER :: ierr, recvtag, sz , tag
    INTEGER :: stat(MPI_STATUS_SIZE)

    sz = 1 ! one value corresponding to nbr (must send a number, receive is blocking)
    tag = 5000 ! tag family for integer scalars
    recvtag = tag + fromid

    !-------------------------------------------------------------------------------
    !IF (fromid > -1) THEN  ! NOTE(review): guard disabled; fromid == -1 would address no rank
    CALL mpi_recv(recvbuffsz, sz, MPI_INTEGER, fromid, recvtag, MPI_COMM_WORLD, stat, ierr)

END FUNCTION fs_mpi_recvbuff1_int
264 !=============================================================
265 !=============================================================
269 !=============================================================
SUBROUTINE fs_mpi_send2neighbors(task_id, mask, p_x, p_y, p_z, p_id, p_src, p_dt, fs_p_m, fs_p_d, fs_p_e, fs_p_t, fs_p_v)
!=============================================================

    !-------------------------------------------------------------------------------
    ! Classify masked particles by which neighboring patch they crossed into
    ! (left/right/up/down plus the four diagonals, decided from FLOOR(p_x)/FLOOR(p_y)
    ! against this rank's patch bounds is:ie, js:je), pack the movers' REAL and
    ! INTEGER properties into flat buffers, and ship them to the owning rank:
    !   count -> fs_mpi_sendbuffsize  (or fs_mpi_nothing2send when nothing crossed)
    !   reals -> fs_mpi_sendbuff_real (nreal arrays of nbr elements each)
    !   ints  -> fs_mpi_sendbuff_int  (nint  arrays of nbr elements each)
    ! NOTE(review): the loop walking task_id to set `sendto`, the branch selecting
    ! between nothing2send and the pack/send path, and the matching END IF/END DO
    ! lines are elided in this listing - confirm against the full source before
    ! changing control flow here.
    !-------------------------------------------------------------------------------

    INTEGER, PARAMETER :: np = neighbors ! number of neighbor tasks
    INTEGER, PARAMETER :: nreal = mpi_datapack_nreal ! number of real type arrays packed together
    INTEGER, PARAMETER :: nint = mpi_datapack_nint ! number of integer type arrays packed together

    INTEGER, INTENT(IN), DIMENSION(:) :: task_id
    LOGICAL, INTENT(IN), DIMENSION(:) :: mask
    INTEGER, INTENT(IN), DIMENSION(:) :: p_id, p_dt, p_src
    REAL, INTENT(IN), DIMENSION(:) :: p_x, p_y, p_z, fs_p_m, fs_p_d, fs_p_e, fs_p_t, fs_p_v

    LOGICAL, ALLOCATABLE, DIMENSION(:) :: masksendto     ! particles going to the current neighbor
    LOGICAL, ALLOCATABLE, DIMENSION(:) :: ml, mr, mu, md ! crossed left/right/up/down
    INTEGER, ALLOCATABLE, DIMENSION(:) :: p_int          ! packed integer payload
    REAL, ALLOCATABLE, DIMENSION(:) :: p_real            ! packed real payload
    INTEGER :: ierr, nbr, ii, sendto, k

    !-------------------------------------------------------------------------------
    !task_id = [left_id, right_id, up_id, down_id, upleft_id, upright_id, downleft_id, downright_id]
    ALLOCATE(masksendto(SIZE(mask)), ml(SIZE(mask)), mr(SIZE(mask)), mu(SIZE(mask)), md(SIZE(mask)))

    ml = (FLOOR(p_x) < is) ! MASK LEFT
    mr = (FLOOR(p_x) > ie) ! MASK RIGHT
    mu = (FLOOR(p_y) > je) ! MASK UP
    md = (FLOOR(p_y) < js) ! MASK DOWN

    !-------------------------------------------------------------------------------
    ! Send to adjacent patch boundaries
    !-------------------------------------------------------------------------------
    IF (sendto > -1) THEN
        ! Edge neighbors exclude corner-crossers (those go to the diagonal ranks)
        IF (sendto == left_id) masksendto = ((mask .AND. ml) .AND. (.NOT. ( md .OR. mu) )) ! LEFT
        IF (sendto == right_id) masksendto = ((mask .AND. mr) .AND. (.NOT. ( md .OR. mu) )) ! RIGHT
        IF (sendto == up_id) masksendto = ((mask .AND. mu) .AND. (.NOT. ( ml .OR. mr) )) ! UP
        IF (sendto == down_id) masksendto = ((mask .AND. md) .AND. (.NOT. ( ml .OR. mr) )) ! DOWN

        IF (sendto == upleft_id) masksendto = (mask .AND. (mu .AND. ml) ) ! UPLEFT
        IF (sendto == upright_id) masksendto = (mask .AND. (mu .AND. mr) ) ! UPRIGHT
        IF (sendto == downleft_id) masksendto = (mask .AND. (md .AND. ml) ) ! DOWNLEFT
        IF (sendto == downright_id) masksendto = (mask .AND. (md .AND. mr) ) ! DOWNRIGHT

        nbr = COUNT(masksendto)

        ! Receivers block on a count, so an empty transfer still sends zero
        CALL fs_mpi_nothing2send(sendto=sendto)

        ! WRITE (msg,'(2(i6,1x))') sendto, nbr
        ! CALL wrf_debug (wrfdbg, 'SPFire_mpi_sendaway sendto_id nbr: '//msg)

        ALLOCATE(p_real(nreal*nbr), p_int(nint*nbr))

        ! Flatten the selected particles: nreal (nint) arrays laid out back-to-back,
        ! each of length nbr - the receive side RESHAPEs with the same layout
        p_real = [PACK(p_x,masksendto),&
                  PACK(p_y,masksendto),&
                  PACK(p_z,masksendto),&
                  PACK(fs_p_m,masksendto),&
                  PACK(fs_p_d,masksendto),&
                  PACK(fs_p_e,masksendto),&
                  PACK(fs_p_t,masksendto),&
                  PACK(fs_p_v,masksendto)]
        p_int = [PACK(p_id,masksendto),&
                 PACK(p_src,masksendto),&
                 PACK(p_dt,masksendto)]

        CALL fs_mpi_sendbuffsize(sendto=sendto, nbr=nbr)
        CALL fs_mpi_sendbuff_real(sendto=sendto, bsz=nbr*nreal, buff=p_real)
        CALL fs_mpi_sendbuff_int(sendto=sendto, bsz=nbr*nint, buff=p_int)

        ! WRITE(msg,'(3(i6,1x),6(f12.6,1x))') sendto, p_int(k), p_int(k+nbr), p_real(k), p_real(k+nbr), p_real(k+2*nbr), p_real(k+4*nbr), p_real(k+6*nbr), p_real(k+7*nbr)
        ! CALL wrf_debug (wrfdbg, 'SPFire_mpi send >>> '// msg)

        DEALLOCATE(p_real, p_int)

    DEALLOCATE(masksendto, ml, mr, mu, md)

!=============================================================
END SUBROUTINE fs_mpi_send2neighbors
369 !=============================================================
374 !=============================================================
SUBROUTINE fs_mpi_init(grid)
!=============================================================

    !-------------------------------------------------------------------------------
    ! One-time setup of the module-scope MPI state: verifies MPI is initialized,
    ! records my_id/mpiprocs, derives the eight neighbor rank ids on the regular
    ! ntasks_x x ntasks_y task grid (-1 where no neighbor exists at a domain
    ! edge/corner), fills task_id, and caches this rank's patch bounds
    ! (is:ie, js:je, ks:ke) via get_local_ijk.
    ! NOTE(review): the base assignments left_id = my_id - 1 and
    ! right_id = my_id + 1 are elided in this listing (only the boundary guards
    ! are visible) - confirm against the full source.
    !-------------------------------------------------------------------------------

    USE module_dm, ONLY : ntasks_x, ntasks_y, mytask_x, mytask_y ! total tasks in x,y dimensions, this task i,j

    TYPE(domain), INTENT(IN) :: grid ! input data **Note: Intent IN**

    INTEGER :: ierr, numprocs
    LOGICAL :: mpi_inited
    CHARACTER (LEN=10) :: envval

    CALL MPI_INITIALIZED( mpi_inited, ierr)

    IF ( .NOT. mpi_inited ) &
        CALL wrf_error_fatal( "failed to initialize MPI")

    !-------------------------------------------------------------------------------
    ! Rank and communicator size (stored in module scope)
    !-------------------------------------------------------------------------------
    CALL MPI_COMM_RANK( MPI_COMM_WORLD, my_id, ierr)
    CALL MPI_COMM_SIZE( MPI_COMM_WORLD, numprocs, ierr)
    mpiprocs = numprocs ! mpiprocs and my_id are in module scope

    WRITE (msg,'(2(i6,1x))') my_id, numprocs
    CALL wrf_debug (wrfdbg, 'SPFire_mpi mpi initialized. myid, nproc: '//msg)

    ! WRITE (msg,'(4(i6,1x))') ntasks_x, ntasks_y, mytask_x, mytask_y
    ! CALL wrf_debug (wrfdbg, 'SPFire_mpi tasks: '//msg)

    ! WRITE (msg,'(5(i9,1x))') my_id, is, ie, js, je
    ! CALL wrf_debug (wrfdbg, 'SPFire_mpi my_id, tile bounds: '//msg)

    ! CALL get_environment_variable ("WRF_NUM_TILES_X",envval, status=ierr)
    ! WRITE (msg,'(1(a10,1x))') envval
    ! CALL wrf_debug (wrfdbg, 'SPFire_mpi: '//msg)

    !-------------------------------------------------------------------------------
    ! Who are my neighbors? all neighbors *_id are declared in module scope
    !-------------------------------------------------------------------------------

    ! get neighbors (-1 at domain bounds); ranks are row-major on the task grid,
    ! so the vertical neighbors differ by ntasks_x
    down_id = my_id - ntasks_x
    up_id = my_id + ntasks_x
    IF( mytask_y == 0) down_id = -1
    IF( mytask_y == (ntasks_y-1) ) up_id = -1

    downleft_id = down_id - 1
    downright_id = down_id + 1
    upleft_id = up_id - 1
    upright_id = up_id + 1

    ! Corners inherit "no neighbor" from either of their two edges
    IF (down_id == -1) downleft_id = -1
    IF (down_id == -1) downright_id = -1
    IF (up_id == -1) upleft_id = -1
    IF (up_id == -1) upright_id = -1

    IF( mytask_x == 0) left_id = -1
    IF( mytask_x == (ntasks_x-1) ) right_id =-1

    IF (left_id == -1) downleft_id = -1
    IF (left_id == -1) upleft_id = -1
    IF (right_id == -1) downright_id = -1
    IF (right_id == -1) upright_id = -1

    ! WRITE (msg,'(5(i6,1x))') my_id, left_id, right_id, up_id, down_id
    ! CALL wrf_debug (wrfdbg, 'SPFire_mpi id, L, R, U, D: '//msg)

    ! WRITE (msg,'(4(i6,1x))') downleft_id, downright_id, upleft_id, upright_id
    ! CALL wrf_debug (wrfdbg, 'SPFire_mpi diag, DL, DR, UL, UR: '//msg)

    ! task_id is in module scope; order is relied upon by send/recv loops
    task_id = [left_id, right_id, up_id, down_id, upleft_id, upright_id, downleft_id, downright_id]

    ! row and column of the current process within the domain:
    ! left_right ( 0 : ntasks_x -1 )
    ! up_down ( 0 : ntasks_y -1 )

    ! my_task_i = MOD(my_id , ntasks_x)
    ! my_task_j = my_id / ntasks_x

    !-------------------------------------------------------------------------------
    ! Set bounds for finding the tiles to send and receive
    ! *** variables declared in MODULE SCOPE ***
    !-------------------------------------------------------------------------------

    CALL get_local_ijk(grid, &
                       ips=is, jps=js, ipe=ie, jpe=je, kps=ks, kpe=ke)

    ! WRITE (msg,'(6(i6,1x))') is, ie, js, je, ks, ke
    ! CALL wrf_debug (wrfdbg, 'SPFire_mpi_init tile bounds: '//msg)

END SUBROUTINE fs_mpi_init
484 !=============================================================
485 !=============================================================
489 !=============================================================
SUBROUTINE fs_mpi_nothing2send(sendto)
!=============================================================

    !-------------------------------------------------------------------------------
    ! Send a particle count of zero to rank sendto so its blocking receive
    ! (fs_mpi_recvbuffsize) can proceed: "no particles to transfer from this rank".
    ! Uses the same tag family (1000 + my_id) as fs_mpi_sendbuffsize.
    ! No-op when sendto == -1 (no neighbor on that side).
    !-------------------------------------------------------------------------------

    INTEGER, INTENT(IN) :: sendto
    INTEGER :: ierr, tag, nbr, sz

    sz  = 1            ! one value corresponding to nbr (must send a number, receive is blocking)
    nbr = 0            ! zero particles: the "nothing to send" signal (was uninitialized)

    tag = 1000 + my_id ! tag for communicating nbr

    IF (sendto > -1) THEN
        CALL mpi_send(nbr, sz, MPI_INTEGER, sendto, tag, MPI_COMM_WORLD, ierr)
    END IF

END SUBROUTINE fs_mpi_nothing2send
512 !=============================================================
513 !=============================================================
516 !=============================================================
FUNCTION fs_mpi_checkreceive(task_list, np) RESULT(buffsz)
!=============================================================

    !-------------------------------------------------------------------------------
    ! Ask each neighbor in task_list whether it has particles for this rank and
    ! return one incoming-buffer size per neighbor (0 = nothing to receive).
    ! fs_mpi_recvbuffsize returns without communicating for rank ids of -1.
    ! NOTE(review): the declarations of np/tmp2/ii, the DO loop over task_list,
    ! and the assignment into buffsz are elided in this listing - confirm against
    ! the full source before editing.
    !-------------------------------------------------------------------------------

    INTEGER, DIMENSION(np) :: buffsz                ! per-neighbor incoming counts (result)
    INTEGER, INTENT(IN), DIMENSION(:) :: task_list  ! neighbor rank ids (-1 where absent)

    !-------------------------------------------------------------------------------
    ! Anything to receive from any tile?
    !-------------------------------------------------------------------------------
    tmp2 = fs_mpi_recvbuffsize(fromid=task_list(ii))

END FUNCTION fs_mpi_checkreceive
541 !=============================================================
542 !=============================================================
545 !=============================================================
SUBROUTINE fs_mpi_recv(np_id, task_id, r_x, r_y, r_z, r_p_m, r_p_d, r_p_e, r_p_t, r_p_v, r_id, r_src, r_dt)
!=============================================================

    !-------------------------------------------------------------------------------
    ! Gather incoming particles from all neighbors: for each neighbor ii with
    ! np_id(ii) > 0, receive its packed REAL and INTEGER payloads (sized
    ! np_id(ii)*nreal and np_id(ii)*nint), RESHAPE them into per-particle rows of
    ! 2-d staging arrays, then unpack the columns into the output arrays.
    ! Column layout matches the PACK order in fs_mpi_send2neighbors:
    !   reals: 1..3 = x,y,z ; 4..8 = p_m, p_d, p_e, p_t, p_v
    !   ints : 1..3 = id, src, dt
    ! NOTE(review): the computation of np_sum (presumably SUM(np_id)), the
    ! istart initialization, the DO/END DO over neighbors, the RESHAPE shape
    ! arguments, the unpacking of r_x/r_y/r_z and the integer outputs, and the
    ! DEALLOCATEs are elided in this listing - confirm against the full source.
    !-------------------------------------------------------------------------------

    INTEGER, PARAMETER :: nreal = mpi_datapack_nreal ! number of real type arrays packed together
    INTEGER, PARAMETER :: nint = mpi_datapack_nint ! number of integer type arrays packed together
    INTEGER, PARAMETER :: np = neighbors

    INTEGER, INTENT(IN), DIMENSION(:) :: task_id, np_id      ! neighbor ranks, per-neighbor counts
    REAL, INTENT(OUT),DIMENSION(:) :: r_x, r_y, r_z          ! received positions
    INTEGER, INTENT(OUT),DIMENSION(:) :: r_id, r_dt, r_src   ! received integer properties
    REAL, INTENT(OUT),DIMENSION(:) :: r_p_m, r_p_d, r_p_e, r_p_t, r_p_v ! received real properties

    INTEGER :: np_sum, istart, iend, ii
    INTEGER, ALLOCATABLE, DIMENSION(:,:) :: arr_int   ! staging: (particle, int field)
    REAL, ALLOCATABLE, DIMENSION(:,:) :: arr_real     ! staging: (particle, real field)

    !-------------------------------------------------------------------------------
    ALLOCATE(arr_real(np_sum,nreal))
    ALLOCATE(arr_int(np_sum, nint))

    ! WRITE (msg,'(8(i4,1x))') (np_id(ii), ii=1,np)
    ! CALL wrf_debug (wrfdbg, 'SPFire_mpi_recv_all np_id >>> '// msg)
    ! WRITE (msg,'(8(i4,1x))') (task_id(ii), ii=1,np)
    ! CALL wrf_debug (wrfdbg, 'SPFire_mpi_recv_all task_id >>> '// msg)

    IF (np_id(ii) > 0) THEN

        iend = istart + np_id(ii) -1

        ! WRITE (msg,'(4(i4,1x))') ii, np_id(ii), istart, iend
        ! CALL wrf_debug (wrfdbg, 'SPFire_mpi recv all id, nbr, istart:iend >>> '// msg)
        ! WRITE (msg,*) SHAPE(arr_real)
        ! CALL wrf_debug (wrfdbg, 'SPFire_mpi_recv_all shape arr_real >>> '// msg)

        arr_real(istart:iend,:) = RESHAPE( &
            fs_mpi_recvfrompatch_real(bsz=np_id(ii)*nreal, fromid=task_id(ii)), &

        arr_int(istart:iend,:) = RESHAPE( &
            fs_mpi_recvfrompatch_int(bsz=np_id(ii)*nint, fromid=task_id(ii)), &

        istart = istart + np_id(ii)

    ! Unpack real property columns (4..8); see layout note above
    r_p_m = arr_real(:,4)
    r_p_d = arr_real(:,5)
    r_p_e = arr_real(:,6)
    r_p_t = arr_real(:,7)
    r_p_v = arr_real(:,8)

END SUBROUTINE fs_mpi_recv
619 !=============================================================
620 !=============================================================
624 !=============================================================
SUBROUTINE fs_mpi_sendbuff_real(bsz, sendto, buff)
!=============================================================

    !-------------------------------------------------------------------------------
    ! Send the type-real buffer:
    ! a 1-d packed array composed of "nreal" flattened arrays, each of size "nbr".
    ! Tag 2000 + my_id pairs with fs_mpi_recvfrompatch_real on the receiving rank
    ! (recvtag = 2000 + fromid).
    !-------------------------------------------------------------------------------

    INTEGER, INTENT(IN) :: bsz, sendto
    REAL, INTENT(IN), DIMENSION(bsz) :: buff ! p_x, p_y, p_z, and particle properties

    INTEGER :: ierr, tag ! local declarations were missing (required under IMPLICIT NONE)

    tag = 2000 + my_id ! 2000: tag real type data
    CALL mpi_send(buff, bsz, MPI_FLOAT, sendto, tag, MPI_COMM_WORLD, ierr)

END SUBROUTINE fs_mpi_sendbuff_real
644 !=============================================================
646 !=============================================================
650 !=============================================================
SUBROUTINE fs_mpi_sendbuff_int(bsz, sendto, buff)
!=============================================================

    !-------------------------------------------------------------------------------
    ! Send the integer buffer:
    ! a 1-d packed array composed of "nint" flattened arrays, each of size "nbr".
    ! Tag 3000 + my_id pairs with fs_mpi_recvfrompatch_int on the receiving rank
    ! (recvtag = 3000 + fromid).
    !-------------------------------------------------------------------------------

    INTEGER, INTENT(IN) :: bsz, sendto
    INTEGER, INTENT(IN), DIMENSION(bsz) :: buff ! p_id, p_src, p_dt

    INTEGER :: ierr, tag ! local declarations were missing (required under IMPLICIT NONE)

    tag = 3000 + my_id ! 3000: tag int type data
    CALL mpi_send(buff, bsz, MPI_INTEGER, sendto, tag, MPI_COMM_WORLD, ierr)

END SUBROUTINE fs_mpi_sendbuff_int
670 !=============================================================
671 !=============================================================
675 !=============================================================
SUBROUTINE fs_mpi_sendbuffsize(nbr, sendto)
!=============================================================

    !-------------------------------------------------------------------------------
    ! Transmit an integer scalar: the number of elements in each array (real or
    ! int) that will be packed into one data buffer and sent over next by
    ! fs_mpi_sendbuff_real/int. The receive side blocks on this count
    ! (fs_mpi_recvbuffsize, recvtag = 1000 + fromid), so it is always sent.
    !-------------------------------------------------------------------------------

    INTEGER, INTENT(IN) :: nbr
    INTEGER, INTENT(IN) :: sendto

    INTEGER, PARAMETER :: nvals = 1 ! exactly one value (the count itself) is transmitted
    INTEGER :: sendtag, mpierr

    sendtag = 1000 + my_id ! 1000: tag family for communicating nbr

    CALL mpi_send(nbr, nvals, MPI_INTEGER, sendto, sendtag, MPI_COMM_WORLD, mpierr)

END SUBROUTINE fs_mpi_sendbuffsize
699 !=============================================================
700 !=============================================================
704 !=============================================================
SUBROUTINE fs_mpi_sendbuff1_real(nbr, sendto)
!=============================================================

    !-------------------------------------------------------------------------------
    ! Transmit a single REAL scalar to rank sendto.
    ! Tag 4000 + my_id pairs with fs_mpi_recvbuff1_real (recvtag = 4000 + fromid);
    ! the receive side blocks, so a value must always be sent.
    !-------------------------------------------------------------------------------

    REAL, INTENT(IN) :: nbr       ! scalar value to transmit
    INTEGER, INTENT(IN) :: sendto ! destination rank

    INTEGER, PARAMETER :: nvals = 1 ! exactly one value is transmitted
    INTEGER :: sendtag, mpierr

    sendtag = 4000 + my_id ! 4000: tag family for real scalars

    CALL mpi_send(nbr, nvals, MPI_FLOAT, sendto, sendtag, MPI_COMM_WORLD, mpierr)

END SUBROUTINE fs_mpi_sendbuff1_real
725 !=============================================================
726 !=============================================================
729 !=============================================================
SUBROUTINE fs_mpi_sendbuff1_int(nbr, sendto)
!=============================================================

    !-------------------------------------------------------------------------------
    ! Transmit a single INTEGER scalar to rank sendto.
    ! Tag 5000 + my_id pairs with fs_mpi_recvbuff1_int (recvtag = 5000 + fromid);
    ! the receive side blocks, so a value must always be sent.
    !-------------------------------------------------------------------------------

    INTEGER, INTENT(IN) :: nbr    ! scalar value to transmit
    INTEGER, INTENT(IN) :: sendto ! destination rank

    INTEGER, PARAMETER :: nvals = 1 ! exactly one value is transmitted
    INTEGER :: sendtag, mpierr

    sendtag = 5000 + my_id ! 5000: tag family for integer scalars

    CALL mpi_send(nbr, nvals, MPI_INTEGER, sendto, sendtag, MPI_COMM_WORLD, mpierr)

END SUBROUTINE fs_mpi_sendbuff1_int
754 !=============================================================
755 !=============================================================
757 !=============================================================
SUBROUTINE get_local_ijk(grid, ips, ipe, jps, jpe, kps, kpe)
!=============================================================

    USE module_domain, ONLY: get_ijk_from_grid

    !-------------------------------------------------------------------------------
    ! Return this task's patch start/end indices (and vertical bounds) for grid
    ! through the OPTIONAL output arguments; any subset may be requested.
    ! A debug message is issued when no output at all is requested.
    ! Fix: the PRESENT-check IF statement was missing its opening "IF (" line,
    ! leaving dangling boolean continuation lines.
    !-------------------------------------------------------------------------------

    TYPE(DOMAIN), INTENT(IN) :: grid
    INTEGER, INTENT(OUT), OPTIONAL :: ips, ipe, jps, jpe, kps, kpe

    INTEGER :: iips, iipe, jjps, jjpe, kkps, kkpe ! patch bounds
    INTEGER :: iims, iime, jjms, jjme, kkms, kkme ! memory bounds (not returned)
    INTEGER :: iids, iide, jjds, jjde, kkds, kkde ! domain bounds (not returned)

    ! Warn when the caller requested no output at all
    IF ((.NOT. PRESENT(ips)) .AND. &
        (.NOT. PRESENT(jps)) .AND. &
        (.NOT. PRESENT(kps)) .AND. &
        (.NOT. PRESENT(ipe)) .AND. &
        (.NOT. PRESENT(jpe)) .AND. &
        (.NOT. PRESENT(kpe))) &
        CALL wrf_debug ( 1, 'get_local_ijk function is NOT requesting a result' )

    CALL get_ijk_from_grid ( grid , &
                             iids, iide, jjds, jjde, kkds, kkde, &
                             iims, iime, jjms, jjme, kkms, kkme, &
                             iips, iipe, jjps, jjpe, kkps, kkpe)

    ! Copy only what was requested
    IF (PRESENT(ips)) ips = iips
    IF (PRESENT(jps)) jps = jjps
    IF (PRESENT(kps)) kps = kkps
    IF (PRESENT(ipe)) ipe = iipe
    IF (PRESENT(jpe)) jpe = jjpe
    IF (PRESENT(kpe)) kpe = kkpe

END SUBROUTINE get_local_ijk
796 !=============================================================
797 !=============================================================
799 END MODULE module_firebrand_spotting_mpi