Line data Source code
1 : module edyn_mpi
2 : use shr_kind_mod, only: r8 => shr_kind_r8, cl=>shr_kind_cl
3 : use cam_logfile, only: iulog
4 : use cam_abortutils, only: endrun
5 :
6 : use spmd_utils, only: masterproc
7 : use mpi, only: mpi_comm_size, mpi_comm_rank, mpi_comm_split
8 : use mpi, only: MPI_PROC_NULL, mpi_wait, MPI_STATUS_SIZE
9 : use mpi, only: MPI_INTEGER, MPI_REAL8, MPI_SUCCESS, MPI_SUM
10 :
11 : implicit none
12 : private
13 : save
14 :
15 : ! Public data
16 : public :: mlon0, mlon1
17 : public :: omlon1
18 : public :: mlat0, mlat1
19 : public :: mlev0, mlev1
20 : public :: mytid
21 : public :: lon0, lon1
22 : public :: lat0, lat1
23 : public :: lev0, lev1
24 : public :: nlev_geo
25 : public :: ntask
26 : public :: ntaski
27 : public :: ntaskj
28 : public :: tasks
29 : public :: nmagtaski
30 : public :: nmagtaskj
31 : public :: mytidi
32 : ! Public type
33 : public :: array_ptr_type
34 : ! Public interfaces
35 : public :: mp_init
36 : public :: mp_geo_halos
37 : public :: mp_pole_halos
38 : public :: mp_mag_halos
39 : public :: mp_scatter_phim
40 : public :: mp_mageq
41 : public :: mp_mageq_jpm1
42 : public :: mp_magpole_2d
43 : public :: mp_mag_foldhem
44 : public :: mp_mag_periodic_f2d
45 : public :: mp_gather_edyn
46 : public :: mp_mageq_jpm3
47 : public :: mp_mag_jslot
48 : public :: mp_magpoles
49 : public :: ixfind
50 : public :: mp_magpole_3d
51 : public :: setpoles
52 : public :: mp_gatherlons_f3d
53 : public :: mp_scatterlons_f3d
54 : public :: mp_exchange_tasks
55 : public :: mp_distribute_mag
56 : public :: mp_distribute_geo
57 :
58 : !
59 : ! Number of MPI tasks and current task id (geo or mag):
60 : !
61 : integer :: &
62 : ntask, & ! number of mpi tasks
63 : mytid ! my task id
64 : !
65 : ! Geographic subdomains for current task:
66 : !
67 :
68 : integer, protected :: &
69 : nlev_geo, & ! number of geo levels
70 : lon0=1, lon1=0, & ! first and last lons for each task
71 : lat0=1, lat1=0, & ! first and last lats for each task
72 : lev0, lev1, & ! first and last levs for each task (not distributed)
73 : ntaski, & ! number of tasks in lon dimension
74 : ntaskj, & ! number of tasks in lat dimension
75 : mytidi ! i coord for current task in task table
76 : integer :: &
77 : nlon_geo, & ! size of geo lon dimension
78 : nlat_geo, & ! size of geo lat dimension
79 : mxlon, & ! max number of subdomain lon points among all tasks
80 : mxlat, & ! max number of subdomain lat points among all tasks
81 : mytidj ! j coord for current task in task table
82 : !
83 : ! Magnetic subdomains for current task:
84 : !
85 : integer, protected :: &
86 : nmagtaski, & ! number of tasks in mag lon dimension
87 : nmagtaskj, & ! number of tasks in mag lat dimension
88 : magtidi, & ! i coord for current task in task table
89 : magtidj, & ! j coord for current task in task table
90 : mlat0=1,mlat1=0, & ! first and last mag lats for each task
91 : mlon0=1,mlon1=0, & ! first and last mag lons for each task
92 : omlon1=0, & ! last mag lon for each task, excluding the periodic point (for outputs)
93 : mlev0,mlev1 ! first and last mag levs (not distributed)
94 :
95 : integer :: &
96 : mxmaglon, & ! max number of mag subdomain lon points among all tasks
97 : mxmaglat ! max number of mag subdomain lat points among all tasks
98 :
99 : integer, allocatable :: &
100 : itask_table_geo(:,:), & ! 2d table of tasks on geographic grid (i,j)
101 : itask_table_mag(:,:) ! 2d table of tasks on mag grid (i,j)
102 :
103 : integer :: cols_comm ! sub-communicator for this task's column
104 : integer :: rows_comm ! sub-communicator for this task's row
105 : !
106 : ! Task type: subdomain information for all tasks, known by all tasks:
107 : !
108 : type task
109 : integer :: mytid ! task id
110 : !
111 : ! Geographic subdomains in task structure:
112 : integer :: mytidi = -1 ! task coord in longitude dimension of task table
113 : integer :: mytidj = -1 ! task coord in latitude dimension of task table
114 : integer :: nlats = 0 ! number of latitudes calculated by this task
115 : integer :: nlons = 0 ! number of longitudes calculated by this task
116 : integer :: lat0 = 1, lat1 = 0 ! first and last latitude indices
117 : integer :: lon0 = 1, lon1 = 0 ! first and last longitude indices
118 : !
119 : ! Magnetic subdomains in task structure:
120 : integer :: magtidi = -1 ! task coord in mag longitude dimension of task table
121 : integer :: magtidj = -1 ! task coord in mag latitude dimension of task table
122 : integer :: nmaglats = 0 ! number of mag latitudes calculated by this task
123 : integer :: nmaglons = 0 ! number of mag longitudes calculated by this task
124 : integer :: mlat0 = 1,mlat1 = 0 ! first and last latitude indices
125 : integer :: mlon0 = 1,mlon1 = 0 ! first and last longitude indices
126 : end type task
127 : !
128 : ! type(task) :: tasks(ntask) will be made available to all tasks
129 : ! (so each task has information about all tasks)
130 : !
131 : type(task), allocatable :: tasks(:)
132 : !
133 : ! Conjugate points in mag subdomains, for mp_mag_foldhem
134 : !
135 : integer,allocatable,dimension(:) :: & ! (ntask)
136 : nsend_south, & ! number of south lats to send to north (each task)
137 : nrecv_north ! number of north lats to recv from south (each task)
138 : integer,allocatable,dimension(:,:) :: & ! (mxmaglat,ntask)
139 : send_south_coords, & ! south j lats to send to north
140 : recv_north_coords ! north j lats to recv from south
141 :
142 : !
143 : ! Magnetic grid parameters
144 : !
145 : integer :: nmlat ! number of mag latitudes
146 : integer :: nmlath ! index of magnetic equator
147 : integer :: nmlonp1 ! number of longitudes plus periodic point
148 : integer :: nmlev ! number of levels (= nlev in geo grid)
149 :
150 : type array_ptr_type
151 : real(r8),pointer :: ptr(:,:,:) ! (k,i,j)
152 : end type array_ptr_type
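!
! Usage sketch (illustrative, not from this module): array_ptr_type
! lets callers pass a set of independently allocated (k,i,j) fields
! to the halo routines without copying. The field names tn/un below
! are hypothetical:
!
!   type(array_ptr_type) :: f(2)
!   f(1)%ptr => tn ! temperature on geo subdomain (k,i,j)
!   f(2)%ptr => un ! zonal wind on geo subdomain (k,i,j)
!   call mp_geo_halos(f,lev0,lev1,lon0,lon1,lat0,lat1,2)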
153 :
154 : integer, protected :: mpi_comm_edyn = -9999
155 :
156 : logical, parameter :: debug = .false.
157 :
158 : contains
159 : !-----------------------------------------------------------------------
160 768 : subroutine mp_init(mpi_comm, ionos_npes, nlon_geo_in, nlat_geo_in, nlev_geo_in)
161 : !
162 : ! Initialize MPI, and allocate task table.
163 : !
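! Illustrative call sequence (a hypothetical driver; communicator and
! grid-size names here are assumptions, not from this module):
!
!   call mp_init(mpi_comm_atm, npes_ionos, plon, plat, pver)
!   call mp_distribute_geo(lon0,lon1,lat0,lat1,1,pver,npes_lon,npes_lat)
!   call mp_distribute_mag(nmlonp1,nmlat,nmlath,pver)
!   call mp_exchange_tasks(mpi_comm_atm, 0, gmlat)
!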
164 : integer, intent(in) :: mpi_comm
165 : integer, intent(in) :: ionos_npes
166 : integer, intent(in) :: nlon_geo_in
167 : integer, intent(in) :: nlat_geo_in
168 : integer, intent(in) :: nlev_geo_in
169 :
170 : integer :: ierr
171 : integer :: color, npes
172 : character(len=cl) :: errmsg
173 :
174 768 : nlon_geo = nlon_geo_in
175 768 : nlat_geo = nlat_geo_in
176 768 : nlev_geo = nlev_geo_in
177 768 : ntask = ionos_npes
178 :
179 768 : call mpi_comm_size(mpi_comm, npes, ierr)
180 768 : call mpi_comm_rank(mpi_comm, mytid, ierr)
181 768 : color = mytid/ionos_npes
182 768 : call mpi_comm_split(mpi_comm, color, mytid, mpi_comm_edyn, ierr)
183 :
184 : !
185 : ! Allocate array of task structures:
186 : !
187 297216 : allocate(tasks(0:npes-1), stat=ierr)
188 768 : if (ierr /= 0) then
189 0 : write(errmsg,"('>>> mp_init: error allocating tasks(',i3,')')") npes
190 0 : write(iulog,*) trim(errmsg)
191 0 : call endrun(errmsg)
192 : endif
193 768 : end subroutine mp_init
194 : !-----------------------------------------------------------------------
195 768 : subroutine mp_distribute_geo(lonndx0, lonndx1, latndx0, latndx1, levndx0, levndx1, ntaski_in, ntaskj_in)
196 : !
197 : ! Args:
198 : integer, intent(in) :: lonndx0
199 : integer, intent(in) :: lonndx1
200 : integer, intent(in) :: latndx0
201 : integer, intent(in) :: latndx1
202 : integer, intent(in) :: levndx0
203 : integer, intent(in) :: levndx1
204 : integer, intent(in) :: ntaski_in
205 : integer, intent(in) :: ntaskj_in
206 : !
207 : ! Local:
208 : integer :: i, j, n, irank, ier, tidrow, nj, ni
209 :
210 : !
211 : ! Define all task structures with current task values
212 : ! (redundant for alltoall):
213 : ! Use WACCM subdomains:
214 : !
215 768 : lon0 = lonndx0 ; lon1 = lonndx1
216 768 : lat0 = latndx0 ; lat1 = latndx1
217 768 : lev0 = levndx0 ; lev1 = levndx1
218 :
219 768 : ntaski = ntaski_in
220 768 : ntaskj = ntaskj_in
221 : !
222 : ! Allocate and set 2d table of tasks:
223 : !
224 3072 : allocate(itask_table_geo(-1:ntaski,-1:ntaskj),stat=ier)
225 768 : if (ier /= 0) then
226 0 : write(iulog,"('>>> Error allocating itable: ntaski,j=',2i4)") ntaski,ntaskj
227 0 : call endrun('itask_table_geo')
228 : endif
229 392448 : itask_table_geo(:,:) = MPI_PROC_NULL
230 :
231 768 : irank = 0
232 768 : mytidi = -1
233 768 : mytidj = -1
234 25344 : do j = 0, ntaskj-1
235 319488 : do i = 0, ntaski-1
236 294912 : itask_table_geo(i,j) = irank
237 294912 : if (mytid == irank) then
238 768 : mytidi = i
239 768 : mytidj = j
240 : end if
241 319488 : irank = irank+1
242 : end do
243 : !
244 : ! Tasks are periodic in longitude:
245 : ! (this is not done in tiegcm, but here sub mp_geo_halos depends on it)
246 : !
247 24576 : itask_table_geo(-1,j) = itask_table_geo(ntaski-1,j)
248 25344 : itask_table_geo(ntaski,j) = itask_table_geo(0,j)
249 :
250 : end do ! j=0,ntaskj-1
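
! Illustrative layout (assuming ntaski=3, ntaskj=2), including the
! periodic columns at i=-1 and i=ntaski:
!
!         i=-1  i=0  i=1  i=2  i=3
!   j=0:    2    0    1    2    0
!   j=1:    5    3    4    5    3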
251 :
252 : if (debug ) then
253 : write(6,"('mp_distribute_geo: mytid=',i4,' ntaski,j=',2i4,' mytidi,j=',2i4,&
254 : ' lon0,1=',2i4,' lat0,1=',2i4,' lev0,1=',2i4)") &
255 : mytid,ntaski,ntaskj,mytidi,mytidj,lon0,lon1,lat0,lat1,lev0,lev1
256 : !
257 : ! Print table to stdout, including -1,ntaski:
258 : !
259 : write(6,"(/,'ntask=',i3,' ntaski=',i2,' ntaskj=',i2,' Geo Task Table:')") &
260 : ntask,ntaski,ntaskj
261 : do j=-1,ntaskj
262 : write(iulog,"('j=',i3,' itask_table_geo(:,j)=',100i3)") j,itask_table_geo(:,j)
263 : enddo
264 : endif
265 : !
266 : ! Calculate start and end indices in lon,lat dimensions for each task:
267 : ! For WACCM: do not call distribute_1d - lon0,1, lat0,1 are set from
268 : ! waccm grid above.
269 : !
270 : ! call distribute_1d(1,nlon,ntaski,mytidi,lon0,lon1)
271 : ! call distribute_1d(1,nlat,ntaskj,mytidj,lat0,lat1)
272 :
273 768 : nj = lat1-lat0+1 ! number of latitudes for this task
274 768 : ni = lon1-lon0+1 ! number of longitudes for this task
275 : !
276 : ! Report my stats to stdout:
277 : if (debug ) then
278 : write(6,"(/,'mytid=',i3,' mytidi,j=',2i3,' lat0,1=',2i3,' (',i2,') lon0,1=',2i3,' (',i2,') ncells=',i4)") &
279 : mytid,mytidi,mytidj,lat0,lat1,nj,lon0,lon1,ni
280 : endif
281 : !
282 : ! Define all task structures with current task values
283 : ! (redundant for alltoall):
284 : !
285 295680 : do n=0,ntask-1
286 294912 : tasks(n)%mytid = mytid
287 294912 : tasks(n)%mytidi = mytidi
288 294912 : tasks(n)%mytidj = mytidj
289 294912 : tasks(n)%nlats = nj
290 294912 : tasks(n)%nlons = ni
291 294912 : tasks(n)%lat0 = lat0
292 294912 : tasks(n)%lat1 = lat1
293 294912 : tasks(n)%lon0 = lon0
294 295680 : tasks(n)%lon1 = lon1
295 : enddo
296 : !
297 : ! All tasks must have at least 4 longitudes:
298 : !
299 768 : if (mytid < ntask) then
300 295680 : do n=0,ntask-1
301 :
302 : if (debug) then
303 : write(6,"('mp_distribute_geo: n=',i3,' tasks(n)%nlons=',i3,' tasks(n)%nlats=',i3)") &
304 : n,tasks(n)%nlons,tasks(n)%nlats
305 : endif
306 :
307 295680 : if (tasks(n)%nlons < 4) then
308 : write(iulog,"('>>> mp_distribute_geo: each task must carry at least 4 longitudes. task=',i4,' nlons=',i4)") &
309 0 : n,tasks(n)%nlons
310 0 : call endrun('edyn_mpi: nlons per task')
311 : endif
312 : enddo
313 : endif
314 :
315 : !
316 : ! Create sub-communicators for each task row (used by mp_geopole_3d):
317 : !
318 : ! call mpi_comm_split(mpi_comm_edyn,mod(mytid,ntaskj),mytid,rows_comm,ier)
319 : ! call MPI_Comm_rank(rows_comm,tidrow,ier)
320 :
321 768 : call mpi_comm_split(mpi_comm_edyn,mytidj,mytid,rows_comm,ier)
322 768 : call MPI_Comm_rank(rows_comm,tidrow,ier)
323 :
324 : if (debug.and.masterproc) then
325 : write(iulog,"('mp_distribute_geo: ntaskj=',i3,' tidrow=',i3)") &
326 : ntaskj,tidrow
327 : endif
328 :
329 768 : end subroutine mp_distribute_geo
330 : !-----------------------------------------------------------------------
331 768 : subroutine mp_distribute_mag(nmlonp1_in, nmlat_in, nmlath_in, nmlev_in)
332 : !
333 : ! Args:
334 : integer, intent(in) :: nmlat_in ! number of mag latitudes
335 : integer, intent(in) :: nmlath_in ! index of magnetic equator
336 : integer, intent(in) :: nmlonp1_in ! number of longitudes plus periodic point
337 : integer, intent(in) :: nmlev_in ! number of levels (= nlev in geo grid)
338 : !
339 : ! Local:
340 : integer :: i, j, n, irank, ier, tidcol, nj, ni, ncells
341 : character(len=cl) :: errmsg
342 : character(len=*), parameter :: subname = 'mp_distribute_mag'
343 : !
344 : ! Number of tasks in mag lon,lat same as geo grid:
345 : !
346 768 : nmagtaski = ntaski
347 768 : nmagtaskj = ntaskj
348 : !
349 : ! Store magnetic grid parameters
350 768 : nmlat = nmlat_in
351 768 : nmlath = nmlath_in
352 768 : nmlonp1 = nmlonp1_in
353 768 : nmlev = nmlev_in
354 768 : if (mytid<ntask) then
355 : !
356 : ! Vertical dimension is not distributed:
357 768 : mlev0 = 1
358 768 : mlev1 = nmlev
359 : !
360 : ! Allocate and set 2d table of tasks:
361 3072 : allocate(itask_table_mag(-1:nmagtaski,-1:nmagtaskj),stat=ier)
362 768 : if (ier /= 0) then
363 0 : write(errmsg, "(a,2(a,i0))") subname, &
364 0 : '>>> Error allocating itable: nmagtaski = ', nmagtaski, &
365 0 : ', j = ', nmagtaskj
366 0 : if (masterproc) then
367 0 : write(iulog,'(a)') trim(errmsg)
368 : end if
369 0 : call endrun(errmsg)
370 : endif
371 392448 : itask_table_mag(:,:) = MPI_PROC_NULL
372 768 : irank = 0
373 25344 : do j = 0, nmagtaskj-1
374 319488 : do i = 0, nmagtaski-1
375 294912 : itask_table_mag(i,j) = irank
376 294912 : if (mytid == irank) then
377 768 : magtidi = i
378 768 : magtidj = j
379 : endif
380 319488 : irank = irank + 1
381 : end do
382 : !
383 : ! Tasks are periodic in longitude:
384 : !
385 24576 : itask_table_mag(-1,j) = itask_table_mag(nmagtaski-1,j)
386 25344 : itask_table_mag(nmagtaski,j) = itask_table_mag(0,j)
387 : end do
388 :
389 : if (debug .and. masterproc) then
390 : !
391 : ! Print table to stdout:
392 : write(iulog,"(/,a,/a,i3,a,i2,a,i2,' Mag Task Table:')") subname, &
393 : 'ntask=',ntask,' nmagtaski=',nmagtaski,' nmagtaskj=',nmagtaskj
394 : do j = -1, nmagtaskj
395 : write(iulog,"('j = ',i3,', itask_table_mag(:,j) = ',100i3)") &
396 : j, itask_table_mag(:,j)
397 : end do
398 : end if
399 : !
400 : ! Calculate start and end indices in mag lon,lat dimensions for each task:
401 : !
402 768 : call distribute_1d(1, nmlonp1, nmagtaski, magtidi, mlon0, mlon1)
403 768 : call distribute_1d(1, nmlat, nmagtaskj, magtidj, mlat0, mlat1)
404 :
405 768 : omlon1 = mlon1
406 768 : if (omlon1 == nmlonp1) then
407 64 : omlon1 = omlon1-1
408 : end if
409 :
410 768 : nj = mlat1 - mlat0 + 1 ! number of mag latitudes for this task
411 768 : ni = mlon1 - mlon0 + 1 ! number of mag longitudes for this task
412 768 : ncells = nj * ni ! total number of grid cells for this task
413 :
414 : if (debug) then
415 : !
416 : ! Report my stats to stdout:
417 : write(6,"(/,a,i3,a,2i3,a,2i3,a,i2,2a,2i3,a,i2,a,i4)") &
418 : 'mytid = ',mytid, ', magtidi,j = ', magtidi, magtidj, &
419 : ', mlat0,1 = ', mlat0, mlat1, ' (', nj, ')', &
420 : ', mlon0,1 = ', mlon0, mlon1, ' (', ni, ') ncells = ', ncells
421 : end if
422 : !
423 : ! Define all task structures with current task values
424 : ! (redundant for alltoall):
425 : !
426 295680 : do n=0,ntask-1
427 294912 : tasks(n)%magtidi = magtidi
428 294912 : tasks(n)%magtidj = magtidj
429 294912 : tasks(n)%nmaglats = nj
430 294912 : tasks(n)%nmaglons = ni
431 294912 : tasks(n)%mlat0 = mlat0
432 294912 : tasks(n)%mlat1 = mlat1
433 294912 : tasks(n)%mlon0 = mlon0
434 295680 : tasks(n)%mlon1 = mlon1
435 : enddo
436 : !
437 : ! All tasks must have at least 4 longitudes:
438 295680 : do n = 0, ntask-1
439 295680 : if (tasks(n)%nmaglons < 4) then
440 0 : write(errmsg, "(3a,i0,', nmaglons = ',i4)") '>>> ', subname, &
441 0 : ': each task must carry at least 4 longitudes. task = ', &
442 0 : n, tasks(n)%nmaglons
443 0 : if (masterproc) then
444 0 : write(iulog,'(a)') trim(errmsg)
445 : end if
446 0 : call endrun(errmsg)
447 : end if
448 : end do
449 : !
450 : ! Create subgroup communicators for each task column:
451 : ! These communicators will be used by sub mp_mag_jslot (mpi.F).
452 : !
453 : call mpi_comm_split(mpi_comm_edyn, mod(mytid,nmagtaski), mytid, &
454 768 : cols_comm, ier)
455 768 : call MPI_Comm_rank(cols_comm,tidcol,ier)
456 :
457 : if (debug .and. masterproc) then
458 : write(iulog,"(2a,i3,' mod(mytid,nmagtaski)=',i3,' tidcol=',i3)") &
459 : subname, ': nmagtaski = ', nmagtaski, mod(mytid,nmagtaski), tidcol
460 : end if
461 : end if
462 :
463 768 : end subroutine mp_distribute_mag
464 : !-----------------------------------------------------------------------
465 1536 : subroutine distribute_1d(n1,n2,nprocs,myrank,istart,iend)
466 : !
467 : ! Distribute work across a 1d vector(n1->n2) to nprocs.
468 : ! Return start and end indices for proc myrank.
469 : !
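! Worked example (illustrative): n1=1, n2=97 (nmlonp1), nprocs=4 gives
! lenproc=24 and iremain=1, so rank 0 gets 1:25 (it takes the one
! remainder point) and ranks 1-3 get 26:49, 50:73, 74:97.
!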
470 : ! Args:
471 : integer,intent(in) :: n1,n2,nprocs,myrank
472 : integer,intent(out) :: istart,iend
473 : !
474 : ! Local:
475 : integer :: lenproc,iremain,n
476 : !
477 1536 : n = n2-n1+1
478 1536 : lenproc = n/nprocs
479 1536 : iremain = mod(n,nprocs)
480 1536 : istart = n1 + myrank*lenproc + min(myrank,iremain)
481 1536 : iend = istart+lenproc-1
482 1536 : if (iremain > myrank) iend = iend+1
483 1536 : end subroutine distribute_1d
484 : !-----------------------------------------------------------------------
485 768 : subroutine mp_exchange_tasks(mpi_comm, iprint, gmlat)
486 : !
487 : ! Args:
488 : integer, intent(in) :: mpi_comm
489 : integer, intent(in) :: iprint
490 : real(r8), intent(in) :: gmlat(:)
491 : !
492 : ! Local:
493 : ! itasks_send(len_task_type,ntask) will be used to send tasks(:) info
494 : ! to all tasks (directly passing mpi derived data types is reportedly
495 : ! not stable, or not available until MPI 2.x).
496 : !
497 : integer :: n, ier
498 : integer, parameter :: len_task_type = 17 ! see type task above
499 : integer, allocatable :: &
500 768 : itasks_send(:,:), & ! send buffer
501 768 : itasks_recv(:,:) ! recv buffer
502 : integer :: npes
503 :
504 768 : call mpi_comm_size(mpi_comm, npes, ier)
505 :
506 : !
507 : ! Pack tasks(mytid) into itasks_send:
508 2304 : allocate(itasks_send(len_task_type,0:npes-1),stat=ier)
509 768 : if (ier /= 0) then
510 0 : write(iulog,"(a,i4,a,i4)") '>>> Error allocating itasks_send: len_task_type=',&
511 0 : len_task_type,' npes=',npes
512 0 : call endrun('mp_exchange_tasks: unable to allocate itasks_send')
513 : endif
514 2304 : allocate(itasks_recv(len_task_type,0:npes-1),stat=ier)
515 768 : if (ier /= 0) then
516 0 : write(iulog,"(a,i4,a,i4)") '>>> Error allocating itasks_recv: len_task_type=',&
517 0 : len_task_type,' npes=',npes
518 0 : call endrun('mp_exchange_tasks: unable to allocate itasks_recv')
519 : endif
520 295680 : do n=0,npes-1
521 294912 : itasks_send(1,n) = tasks(mytid)%mytid
522 :
523 294912 : itasks_send(2,n) = tasks(mytid)%mytidi
524 294912 : itasks_send(3,n) = tasks(mytid)%mytidj
525 294912 : itasks_send(4,n) = tasks(mytid)%nlats
526 294912 : itasks_send(5,n) = tasks(mytid)%nlons
527 294912 : itasks_send(6,n) = tasks(mytid)%lat0
528 294912 : itasks_send(7,n) = tasks(mytid)%lat1
529 294912 : itasks_send(8,n) = tasks(mytid)%lon0
530 294912 : itasks_send(9,n) = tasks(mytid)%lon1
531 :
532 294912 : itasks_send(10,n) = tasks(mytid)%magtidi
533 294912 : itasks_send(11,n) = tasks(mytid)%magtidj
534 294912 : itasks_send(12,n) = tasks(mytid)%nmaglats
535 294912 : itasks_send(13,n) = tasks(mytid)%nmaglons
536 294912 : itasks_send(14,n) = tasks(mytid)%mlat0
537 294912 : itasks_send(15,n) = tasks(mytid)%mlat1
538 294912 : itasks_send(16,n) = tasks(mytid)%mlon0
539 295680 : itasks_send(17,n) = tasks(mytid)%mlon1
540 : enddo
541 : !
542 : ! Send itasks_send and receive itasks_recv:
543 : call mpi_alltoall(itasks_send,len_task_type,MPI_INTEGER,&
544 : itasks_recv,len_task_type,MPI_INTEGER,&
545 768 : mpi_comm,ier)
546 768 : if (ier /= 0) &
547 0 : call handle_mpi_err(ier,'edyn_mpi: mpi_alltoall to send/recv itasks')
548 : !
549 : ! Unpack itasks_recv into tasks(n)
550 : !
551 295680 : do n=0,npes-1
552 294912 : tasks(n)%mytid = itasks_recv(1,n)
553 :
554 294912 : tasks(n)%mytidi = itasks_recv(2,n)
555 294912 : tasks(n)%mytidj = itasks_recv(3,n)
556 294912 : tasks(n)%nlats = itasks_recv(4,n)
557 294912 : tasks(n)%nlons = itasks_recv(5,n)
558 294912 : tasks(n)%lat0 = itasks_recv(6,n)
559 294912 : tasks(n)%lat1 = itasks_recv(7,n)
560 294912 : tasks(n)%lon0 = itasks_recv(8,n)
561 294912 : tasks(n)%lon1 = itasks_recv(9,n)
562 :
563 294912 : tasks(n)%magtidi = itasks_recv(10,n)
564 294912 : tasks(n)%magtidj = itasks_recv(11,n)
565 294912 : tasks(n)%nmaglats = itasks_recv(12,n)
566 294912 : tasks(n)%nmaglons = itasks_recv(13,n)
567 294912 : tasks(n)%mlat0 = itasks_recv(14,n)
568 294912 : tasks(n)%mlat1 = itasks_recv(15,n)
569 294912 : tasks(n)%mlon0 = itasks_recv(16,n)
570 294912 : tasks(n)%mlon1 = itasks_recv(17,n)
571 : !
572 : ! Report to stdout:
573 : !
574 295680 : if (n==mytid.and.iprint > 0) then
575 0 : write(iulog,"(/,'Task ',i3,':')") n
576 0 : write(iulog,"(/,'Subdomain on geographic grid:')")
577 0 : write(iulog,"('tasks(',i3,')%mytid =',i3)") n,tasks(n)%mytid
578 0 : write(iulog,"('tasks(',i3,')%mytidi=',i3)") n,tasks(n)%mytidi
579 0 : write(iulog,"('tasks(',i3,')%mytidj=',i3)") n,tasks(n)%mytidj
580 0 : write(iulog,"('tasks(',i3,')%nlats =',i3)") n,tasks(n)%nlats
581 0 : write(iulog,"('tasks(',i3,')%nlons =',i3)") n,tasks(n)%nlons
582 0 : write(iulog,"('tasks(',i3,')%lat0 =',i3)") n,tasks(n)%lat0
583 0 : write(iulog,"('tasks(',i3,')%lat1 =',i3)") n,tasks(n)%lat1
584 0 : write(iulog,"('tasks(',i3,')%lon0 =',i3)") n,tasks(n)%lon0
585 0 : write(iulog,"('tasks(',i3,')%lon1 =',i3)") n,tasks(n)%lon1
586 : write(iulog,"('Number of geo subdomain grid points = ',i6)") &
587 0 : tasks(n)%nlons * tasks(n)%nlats
588 0 : write(iulog,"(/,'Subdomain on geomagnetic grid:')")
589 0 : write(iulog,"('tasks(',i3,')%magtidi=',i3)") n,tasks(n)%magtidi
590 0 : write(iulog,"('tasks(',i3,')%magtidj=',i3)") n,tasks(n)%magtidj
591 0 : write(iulog,"('tasks(',i3,')%nmaglats =',i3)") n,tasks(n)%nmaglats
592 0 : write(iulog,"('tasks(',i3,')%nmaglons =',i3)") n,tasks(n)%nmaglons
593 0 : write(iulog,"('tasks(',i3,')%mlat0 =',i3)") n,tasks(n)%mlat0
594 0 : write(iulog,"('tasks(',i3,')%mlat1 =',i3)") n,tasks(n)%mlat1
595 0 : write(iulog,"('tasks(',i3,')%mlon0 =',i3)") n,tasks(n)%mlon0
596 0 : write(iulog,"('tasks(',i3,')%mlon1 =',i3)") n,tasks(n)%mlon1
597 : write(iulog,"('Number of mag subdomain grid points = ',i6)") &
598 0 : tasks(n)%nmaglons * tasks(n)%nmaglats
599 : endif
600 : enddo
601 : !
602 : ! Release locally allocated space:
603 768 : deallocate(itasks_send)
604 768 : deallocate(itasks_recv)
605 : !
606 : ! mxlon / mxlat is the maximum number of lons / lats owned by any task:
607 768 : mxlon = -9999
608 295680 : do n= 0, npes-1
609 295680 : if (tasks(n)%nlons > mxlon) then
610 768 : mxlon = tasks(n)%nlons
611 : end if
612 : end do
613 768 : mxlat = -9999
614 295680 : do n = 0, npes-1
615 295680 : if (tasks(n)%nlats > mxlat) then
616 768 : mxlat = tasks(n)%nlats
617 : end if
618 : end do
619 : !
620 : ! mxmaglon / mxmaglat is max number of mag lons / lats owned by any task:
621 768 : mxmaglon = -9999
622 295680 : do n = 0, npes-1
623 295680 : if (tasks(n)%nmaglons > mxmaglon) then
624 768 : mxmaglon = tasks(n)%nmaglons
625 : end if
626 : end do
627 768 : mxmaglat = -9999
628 295680 : do n = 0, npes-1
629 295680 : if (tasks(n)%nmaglats > mxmaglat) then
630 768 : mxmaglat = tasks(n)%nmaglats
631 : end if
632 : end do
633 : !
634 : ! Find conjugate points for folding hemispheres:
635 768 : call conjugate_points(gmlat)
636 :
637 768 : end subroutine mp_exchange_tasks
638 : !-----------------------------------------------------------------------
639 7296 : subroutine mp_mageq(fin,fout,nf,mlon0,mlon1,mlat0,mlat1,nmlev)
640 : !
641 : ! Each task needs values of conductivities and adotv1,2 fields
642 : ! at the mag equator for its longitude subdomain (and all levels), for
643 : ! the fieldline integrations.
644 : !
645 : ! On input, fin is ped_mag, hal_mag, adotv1_mag, adotv2_mag
646 : ! on (i,j,k) magnetic subdomain.
647 : ! On output, fout(mlon0:mlon1,nmlev,nf) is ped_meq, hal_meq, adotv1_meq,
648 : ! adotv2_meq at mag equator at longitude subdomain and all levels.
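! Conceptually this is a broadcast from the equator-owning task down
! each task column (tasks sharing mytidi); it is implemented here with
! point-to-point isend/irecv pairs rather than an MPI collective.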
649 : !
650 : ! Args:
651 : integer :: mlon0,mlon1,mlat0,mlat1,nmlev,nf
652 : real(r8),intent(in) :: fin (mlon0:mlon1,mlat0:mlat1,nmlev,nf)
653 : real(r8),intent(out) :: fout(mlon0:mlon1,nmlev,nf)
654 : !
655 : ! Local:
656 : real(r8) :: & ! mpi buffers
657 14592 : sndbuf(mxmaglon,nmlev,nf), & ! mxmaglon,nmlev,nf
658 7296 : rcvbuf(mxmaglon,nmlev,nf) ! mxmaglon,nmlev,nf
659 : integer :: i,j,n,itask,ier,len,jlateq,ireqsend,ireqrecv
660 : integer :: irstat(MPI_STATUS_SIZE) ! mpi receive status
661 : logical :: have_eq
662 :
663 30387840 : sndbuf = 0._r8
664 30387840 : rcvbuf = 0._r8
665 7296 : len = mxmaglon*nmlev*nf
666 : !
667 : ! If mag equator is in current subdomain, load it into sndbuf
668 : ! and send to other tasks in my task column (mytidi)
669 : !
670 7296 : jlateq = (nmlat+1)/2 ! lat index of mag equator (49)
671 7296 : have_eq = .false.
672 29412 : do j=mlat0,mlat1
673 29412 : if (j == jlateq) then ! load send buffer w/ data at equator
674 228 : have_eq = .true.
675 1767 : do i=mlon0,mlon1
676 808203 : sndbuf(i-mlon0+1,:,:) = fin(i,j,:,:)
677 : enddo
678 : !
679 : ! Send mag equator data to other tasks in my task column (mytidi):
680 87780 : do itask=0,ntask-1
681 87780 : if (itask /= mytid.and.tasks(itask)%mytidi==mytidi) then
682 : call mpi_isend(sndbuf,len,MPI_REAL8,itask,1, &
683 7068 : mpi_comm_edyn,ireqsend,ier)
684 7068 : if (ier /= 0) call handle_mpi_err(ier,'mp_mageq isend')
685 7068 : call mpi_wait(ireqsend,irstat,ier)
686 : endif ! another task in mytidi
687 : enddo ! itask=0,ntask-1
688 : endif ! j==jlateq
689 : enddo ! j=mlat0,mlat1
690 : !
691 : ! Other tasks in the sending task's column receive:
692 29439360 : fout = 0._r8
693 7296 : if (.not.have_eq) then ! find task to receive from
694 2721180 : do itask=0,ntask-1
695 10948332 : do j=tasks(itask)%mlat0,tasks(itask)%mlat1
696 10941264 : if (j == jlateq.and.tasks(itask)%mytidi==mytidi) then
697 : call mpi_irecv(rcvbuf,len,MPI_REAL8,itask,1, &
698 7068 : mpi_comm_edyn,ireqrecv,ier)
699 7068 : if (ier /= 0) call handle_mpi_err(ier,'mp_mageq irecv')
700 7068 : call mpi_wait(ireqrecv,irstat,ier)
701 35340 : do n=1,nf
702 226176 : do i=mlon0,mlon1
703 25027788 : fout(i,:,n) = rcvbuf(i-mlon0+1,:,n)
704 : enddo
705 : enddo
706 : endif ! itask has mag eq and is in my column (sending task)
707 : enddo ! scan itask latitudes
708 : enddo ! task table search
709 : !
710 : ! If I am the sending task, set fout to equator values of input array:
711 : else
712 1140 : do n=1,nf
713 7296 : do i=mlon0,mlon1
714 807348 : fout(i,:,n) = fin(i,jlateq,:,n)
715 : enddo
716 : enddo
717 : endif ! I am receiving or sending task
718 7296 : end subroutine mp_mageq
719 : !-----------------------------------------------------------------------
720 7296 : subroutine mp_mageq_jpm1(f,mlon0,mlon1,mlat0,mlat1,nmlonp1,feq_jpm1,nf)
721 : !
722 : ! All tasks need data at mag latitudes equator-1, equator+1 at global
723 : ! longitudes.
724 : ! On input: f is 6 fields on mag subdomains: zigm11,zigm22,zigmc,zigm2,rim1,rim2
725 : ! On output: feq_jpm1(nmlonp1,2,nf)
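!
! Note: the mpi_allreduce with MPI_SUM below acts as an all-gather:
! each task deposits its subdomain slice into an otherwise zeroed
! buffer, so summing across tasks reconstructs the global-longitude
! rows on every rank. The same pattern is used by mp_mageq_jpm3,
! mp_magpole_2d, mp_magpole_3d, and mp_magpoles.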
726 : !
727 : ! Args:
728 : integer,intent(in) :: mlon0,mlon1,mlat0,mlat1,nmlonp1,nf
729 : real(r8),intent(in) :: f(mlon0:mlon1,mlat0:mlat1,nf)
730 : real(r8),intent(out) :: feq_jpm1(nmlonp1,2,nf) ! eq-1,eq+1
731 : !
732 : ! Local:
733 : integer :: j,ier,len,jlateq
734 7296 : real(r8) :: sndbuf(nmlonp1,2,nf)
735 :
736 8434176 : sndbuf = 0._r8
737 8434176 : feq_jpm1 = 0._r8
738 7296 : len = nmlonp1*2*nf
739 : !
740 : ! Load send buffer w/ eq +/- 1 for current subdomain
741 : ! (redundant to all tasks for alltoall)
742 : !
743 7296 : jlateq = (nmlat+1)/2
744 29412 : do j=mlat0,mlat1
745 29412 : if (j == jlateq+1) then ! equator+1
746 12597 : sndbuf(mlon0:mlon1,1,:) = f(mlon0:mlon1,j,:)
747 21888 : elseif (j == jlateq-1) then ! equator-1
748 12597 : sndbuf(mlon0:mlon1,2,:) = f(mlon0:mlon1,j,:)
749 : endif ! j==jlateq
750 : enddo ! j=mlat0,mlat1
751 : !
752 : ! Do the exchange:
753 : !
754 7296 : call mpi_allreduce( sndbuf(:,:,1:nf), feq_jpm1(:,:,1:nf), len, MPI_REAL8, MPI_SUM, mpi_comm_edyn, ier )
755 7296 : if ( ier .ne. MPI_SUCCESS ) call handle_mpi_err(ier,'mp_mageq_jpm1 call mpi_allreduce')
756 :
757 : !
758 : ! Periodic point:
759 160512 : feq_jpm1(nmlonp1,:,:) = feq_jpm1(1,:,:)
760 :
761 7296 : end subroutine mp_mageq_jpm1
762 : !-----------------------------------------------------------------------
763 7296 : subroutine mp_mageq_jpm3(f,mlon0,mlon1,mlat0,mlat1,nmlonp1,feq_jpm3,nf)
764 : !
765 : ! All tasks need global longitudes at mag latitudes equator,
766 : ! and equator +/- 1,2,3
767 : ! On input: f is nf fields on mag subdomains
768 : ! On output: feq_jpm3(nmlonp1,-3:3,nf) has global lons at eq, eq +/- 1,2,3
769 : ! 2nd dimension of feq_jpm3 (and send/recv buffers) is as follows:
770 : ! +3: eq+3
771 : ! +2: eq+2
772 : ! +1: eq+1
773 : ! 0: eq
774 : ! -1: eq-1
775 : ! -2: eq-2
776 : ! -3: eq-3
777 : !
778 : ! Args:
779 : integer,intent(in) :: mlon0,mlon1,mlat0,mlat1,nmlonp1,nf
780 : real(r8),intent(in) :: f(mlon0:mlon1,mlat0:mlat1,nf)
781 : real(r8),intent(out) :: feq_jpm3(nmlonp1,-3:3,nf)
782 : !
783 : ! Local:
784 : integer :: j,ier,len,jlateq
785 : integer,parameter :: mxnf=6
786 :
787 14592 : real(r8) :: sndbuf(nmlonp1,-3:3,mxnf)
788 :
789 7296 : if (nf > mxnf) then
790 : write(iulog,"('>>> mp_mageq_jpm3: nf=',i4,' exceeds the maximum mxnf=',i4)") &
791 0 : nf,mxnf
792 0 : call endrun('mp_mageq_jpm3')
793 : endif
794 :
795 25178496 : sndbuf = 0._r8
796 4202496 : feq_jpm3 = 0._r8
797 7296 : len = nmlonp1*7*nf
798 : !
799 : ! Load send buffer w/ eq +/- 3 for current subdomain
800 : !
801 7296 : jlateq = (nmlat+1)/2
802 29412 : do j=mlat0,mlat1
803 29412 : if (j == jlateq-3) then ! equator-3
804 1995 : sndbuf(mlon0:mlon1,-3,1:nf) = f(mlon0:mlon1,j,:)
805 21888 : elseif (j == jlateq-2) then ! equator-2
806 1995 : sndbuf(mlon0:mlon1,-2,1:nf) = f(mlon0:mlon1,j,:)
807 21660 : elseif (j == jlateq-1) then ! equator-1
808 1995 : sndbuf(mlon0:mlon1,-1,1:nf) = f(mlon0:mlon1,j,:)
809 21432 : elseif (j == jlateq) then ! equator
810 1995 : sndbuf(mlon0:mlon1,0,1:nf) = f(mlon0:mlon1,j,:)
811 21204 : elseif (j == jlateq+1) then ! equator+1
812 1995 : sndbuf(mlon0:mlon1,1,1:nf) = f(mlon0:mlon1,j,:)
813 20976 : elseif (j == jlateq+2) then ! equator+2
814 1995 : sndbuf(mlon0:mlon1,2,1:nf) = f(mlon0:mlon1,j,:)
815 20748 : elseif (j == jlateq+3) then ! equator+3
816 1995 : sndbuf(mlon0:mlon1,3,1:nf) = f(mlon0:mlon1,j,:)
817 : endif ! j==jlateq
818 : enddo ! j=mlat0,mlat1
819 : !
820 : ! Do the exchange:
821 : !
822 7296 : call mpi_allreduce( sndbuf(:,:,1:nf), feq_jpm3(:,:,1:nf), len, MPI_REAL8, MPI_SUM, mpi_comm_edyn, ier )
823 7296 : if ( ier .ne. MPI_SUCCESS ) call handle_mpi_err(ier,'mp_mageq_jpm3 call mpi_allreduce')
824 :
825 : !
826 : ! Periodic point:
827 65664 : feq_jpm3(nmlonp1,:,:) = feq_jpm3(1,:,:)
828 :
829 7296 : end subroutine mp_mageq_jpm3
830 : !-----------------------------------------------------------------------
831 14592 : subroutine mp_magpole_2d(f,ilon0,ilon1,ilat0,ilat1, &
832 14592 : nglblon,jspole,jnpole,fpole_jpm2,nf)
833 : !
834 : ! Return fpole_jpm2(nglblon,1->4,nf) as:
835 : ! 1: j = jspole+1 (spole+1)
836 : ! 2: j = jspole+2 (spole+2)
837 : ! 3: j = jnpole-1 (npole-1)
838 : ! 4: j = jnpole-2 (npole-2)
839 : ! This can be called with different number of fields nf, but cannot
840 : ! be called w/ > mxnf fields.
841 : !
842 : ! Args:
843 : integer,intent(in) :: ilon0,ilon1,ilat0,ilat1,nglblon,jspole,jnpole,nf
844 : real(r8),intent(in) :: f(ilon0:ilon1,ilat0:ilat1,nf)
845 : real(r8),intent(out) :: fpole_jpm2(nglblon,4,nf)
846 : !
847 : ! Local:
848 : integer :: j,ier,len
849 : integer,parameter :: mxnf=7
850 29184 : real(r8) :: sndbuf(nglblon,4,mxnf)
851 :
852 14592 : if (nf > mxnf) then
853 : write(iulog,"('>>> mp_magpole_2d: nf=',i4,' exceeds the maximum mxnf=',i4)") &
854 0 : nf,mxnf
855 0 : call endrun('mp_magpole_2d')
856 : endif
857 :
858 33619968 : sndbuf = 0._r8
859 26418816 : fpole_jpm2 = 0._r8
860 14592 : len = nglblon*4*nf
861 : !
862 : ! Load send buffer with values at poles +/- 2 for current subdomain
863 : !
864 58824 : do j=ilat0,ilat1
865 58824 : if (j==jspole+1) then ! south pole +1
866 19893 : sndbuf(ilon0:ilon1,1,1:nf) = f(ilon0:ilon1,j,:)
867 43776 : elseif (j==jspole+2) then ! south pole +2
868 19893 : sndbuf(ilon0:ilon1,2,1:nf) = f(ilon0:ilon1,j,:)
869 43320 : elseif (j==jnpole-1) then ! north pole -1
870 19893 : sndbuf(ilon0:ilon1,3,1:nf) = f(ilon0:ilon1,j,:)
871 42864 : elseif (j==jnpole-2) then ! north pole -2
872 19893 : sndbuf(ilon0:ilon1,4,1:nf) = f(ilon0:ilon1,j,:)
873 : endif
874 : enddo
875 :
876 : !
877 : ! Do the exchange:
878 : !
879 14592 : call mpi_allreduce( sndbuf(:,:,1:nf), fpole_jpm2(:,:,1:nf), len, MPI_REAL8, MPI_SUM, mpi_comm_edyn, ier )
880 14592 : if ( ier .ne. MPI_SUCCESS ) call handle_mpi_err(ier,'mp_magpole_2d call mpi_allreduce')
881 :
882 14592 : end subroutine mp_magpole_2d
883 : !-----------------------------------------------------------------------
884 7296 : subroutine mp_magpole_3d(f,ilon0,ilon1,ilat0,ilat1,nlev, nglblon,jspole,jnpole,fpole_jpm2,nf)
885 : !
886 : ! Return fpole_jpm2(nglblon,1->4,nlev,nf) as:
887 : ! 1: j = jspole+1 (spole+1)
888 : ! 2: j = jspole+2 (spole+2)
889 : ! 3: j = jnpole-1 (npole-1)
890 : ! 4: j = jnpole-2 (npole-2)
891 : ! This can be called with different number of fields nf, but cannot
892 : ! be called w/ > mxnf fields.
893 : !
894 : ! Args:
895 : integer,intent(in) :: ilon0,ilon1,ilat0,ilat1,nglblon,&
896 : jspole,jnpole,nf,nlev
897 : real(r8),intent(in) :: f(ilon0:ilon1,ilat0:ilat1,nlev,nf)
898 : real(r8),intent(out) :: fpole_jpm2(nglblon,4,nlev,nf)
899 : !
900 : ! Local:
901 : integer :: j,k,ier,len
902 : integer,parameter :: mxnf=6
903 14592 : real(r8) :: sndbuf(nglblon,4,nlev,mxnf)
904 :
905 7296 : if (nf > mxnf) then
906 : write(iulog,"('>>> mp_magpole_3d: nf=',i4,' exceeds the maximum mxnf=',i4)") &
907 0 : nf,mxnf
908 0 : call endrun('mp_magpole_3d')
909 : endif
910 :
911 1872350592 : sndbuf = 0._r8
912 312064512 : fpole_jpm2 = 0._r8
913 7296 : len = nglblon*4*nlev*nf
914 : !
915 : ! Load send buffer with values at poles +/- 2 for current subdomain
916 : !
917 29412 : do j=ilat0,ilat1
918 2904492 : do k=1,nlev
919 2897196 : if (j==jspole+1) then ! south pole +1
920 259350 : sndbuf(ilon0:ilon1,1,k,1:nf) = f(ilon0:ilon1,j,k,:)
921 2845440 : elseif (j==jspole+2) then ! south pole +2
922 259350 : sndbuf(ilon0:ilon1,2,k,1:nf) = f(ilon0:ilon1,j,k,:)
923 2815800 : elseif (j==jnpole-1) then ! north pole -1
924 259350 : sndbuf(ilon0:ilon1,3,k,1:nf) = f(ilon0:ilon1,j,k,:)
925 2786160 : elseif (j==jnpole-2) then ! north pole -2
926 259350 : sndbuf(ilon0:ilon1,4,k,1:nf) = f(ilon0:ilon1,j,k,:)
927 : endif
928 : enddo
929 : enddo
930 :
931 : !
932 : ! Do the exchange:
933 : !
934 7296 : call mpi_allreduce( sndbuf(:,:,:,1:nf), fpole_jpm2(:,:,:,1:nf), len, MPI_REAL8, MPI_SUM, mpi_comm_edyn, ier )
935 7296 : if ( ier .ne. MPI_SUCCESS ) call handle_mpi_err(ier,'mp_magpole_3d call mpi_allreduce')
936 :
937 7296 : end subroutine mp_magpole_3d
938 : !-----------------------------------------------------------------------
939 14592 : subroutine mp_magpoles(f,ilon0,ilon1,ilat0,ilat1,nglblon, jspole,jnpole,fpoles,nf)
940 : !
941 : ! Similar to mp_magpole_2d, but returns global longitudes for
942 : ! j==1 and j==nmlat (not for poles +/- 2)
943 : ! Return fpoles(nglblon,2,nf) as:
944 : ! 1: j = jspole (spole)
945 : ! 2: j = jnpole (npole)
946 : ! This can be called with different number of fields nf, but cannot
947 : ! be called w/ > mxnf fields.
948 : !
949 : ! Args:
950 : integer,intent(in) :: ilon0,ilon1,ilat0,ilat1,nglblon, jspole,jnpole,nf
951 : real(r8),intent(in) :: f(ilon0:ilon1,ilat0:ilat1,nf)
952 : real(r8),intent(out) :: fpoles(nglblon,2,nf)
953 : !
954 : ! Local:
955 : integer :: j,ier,len
956 14592 : real(r8) :: sndbuf(nglblon,2,nf)
957 :
958 157717632 : sndbuf = 0._r8
959 157717632 : fpoles = 0._r8
960 14592 : len = nglblon*2*nf
961 : !
962 : ! Load send buffer with values at the poles for current subdomain
963 : !
964 58824 : do j=ilat0,ilat1
965 58824 : if (j==jspole) then ! south pole
966 231933 : sndbuf(ilon0:ilon1,1,1:nf) = f(ilon0:ilon1,j,:)
967 43776 : elseif (j==jnpole) then ! north pole
968 231933 : sndbuf(ilon0:ilon1,2,1:nf) = f(ilon0:ilon1,j,:)
969 : endif
970 : enddo
971 :
972 : !
973 : ! Do the exchange:
974 : !
975 14592 : call mpi_allreduce( sndbuf(:,:,1:nf), fpoles(:,:,1:nf), len, MPI_REAL8, MPI_SUM, mpi_comm_edyn, ier )
976 14592 : if ( ier .ne. MPI_SUCCESS ) call handle_mpi_err(ier,'mp_magpoles call mpi_allreduce')
977 :
978 14592 : end subroutine mp_magpoles
979 : !-----------------------------------------------------------------------
980 : integer function getpe(ix,jx)
981 : integer,intent(in) :: ix,jx
982 : integer :: it
983 :
984 : getpe = -1
985 : do it=0,ntask-1
986 : if ((tasks(it)%lon0 <= ix .and. tasks(it)%lon1 >= ix).and.&
987 : (tasks(it)%lat0 <= jx .and. tasks(it)%lat1 >= jx)) then
988 : getpe = it
989 : exit
990 : endif
991 : enddo
992 : if (getpe < 0) then
993 : write(iulog,"('getpe: pe with ix=',i4,', jx=',i4,' not found.')") ix,jx
994 : call endrun('getpe')
995 : endif
996 : end function getpe
997 : !-----------------------------------------------------------------------
998 116736 : subroutine mp_pole_halos(f,lev0,lev1,lon0,lon1,lat0,lat1,nf,polesign)
999 : !
1000 : ! Set latitude halo points over the poles.
1001 : !
1002 : ! Args:
1003 : integer,intent(in) :: lev0,lev1,lon0,lon1,lat0,lat1,nf
1004 : real(r8),intent(in) :: polesign(nf)
1005 : type(array_ptr_type) :: f(nf) ! (plev,i0-2:i1+2,j0-2:j1+2)
1006 : !
1007 : ! Local:
1008 : integer :: if,i,j,k,ihalo,it,i0,i1,j0,j1,itask
1009 :
1010 : ! real(r8) :: fglblon(lev0:lev1,nlon,lat0-2:lat1+2,nf)
1011 109440 : type(array_ptr_type) :: pglblon(nf) ! (lev0:lev1,nlon_geo,lat0-2:lat1+2)
1012 :
1013 116736 : if (mytidj /= 0 .and. mytidj /= ntaskj-1) return
1014 :
1015 : ! fglblon = 0._r8 ! init
1016 : !
1017 : ! Allocate local fields with global longitudes:
1018 30096 : do if=1,nf
1019 121296 : allocate(pglblon(if)%ptr(lev0:lev1,nlon_geo,lat0-2:lat1+2))
1020 : enddo
1021 : !
1022 : ! Define my subdomain in local pglblon, which has global lon dimension:
1023 : !
1024 30096 : do if=1,nf
1025 189696 : do j=lat0-2,lat1+2
1026 2097600 : do i=lon0,lon1
1027 251050800 : pglblon(if)%ptr(lev0:lev1,i,j) = f(if)%ptr(lev0:lev1,i,j)
1028 : enddo
1029 : enddo
1030 : enddo
1031 : !
1032 : ! Gather longitude data to westernmost processors (far north and south):
1033 : !
1034 7296 : call mp_gatherlons_f3d(pglblon,lev0,lev1,lon0,lon1,lat0-2,lat1+2,nf)
1035 : !
1036 : ! Loop over tasks in my latitude row (far north or far south),
1037 : ! including myself, and set halo points over the poles.
1038 : !
1039 7296 : if (mytidi==0) then
1040 7904 : do it=0,ntaski-1
1041 7296 : itask = tasks(itask_table_geo(it,mytidj))%mytid
1042 7296 : i0 = tasks(itask)%lon0
1043 7296 : i1 = tasks(itask)%lon1
1044 7296 : j0 = tasks(itask)%lat0
1045 7296 : j1 = tasks(itask)%lat1
1046 30704 : do if=1,nf
1047 30096 : if (j0==1) then ! south
1048 148200 : do i=i0,i1
1049 136800 : ihalo = 1+mod(i-1+nlon_geo/2,nlon_geo)
1050 17920800 : pglblon(if)%ptr(lev0:lev1,i,j0-2) = pglblon(if)%ptr(lev0:lev1,ihalo,j0+2) ! get lat -1 from lat 3
1051 17932200 : pglblon(if)%ptr(lev0:lev1,i,j0-1) = pglblon(if)%ptr(lev0:lev1,ihalo,j0+1) ! get lat 0 from lat 2
1052 : enddo
1053 : else ! north
1054 148200 : do i=i0,i1
1055 136800 : ihalo = 1+mod(i-1+nlon_geo/2,nlon_geo)
1056 17920800 : pglblon(if)%ptr(lev0:lev1,i,j1+1) = pglblon(if)%ptr(lev0:lev1,ihalo,j1-1) ! get lat plat+1 from plat-1
1057 17932200 : pglblon(if)%ptr(lev0:lev1,i,j1+2) = pglblon(if)%ptr(lev0:lev1,ihalo,j1-2) ! get lat plat+2 from plat-2
1058 : enddo
1059 : endif
1060 : enddo ! if=1,nf
1061 : enddo ! it=0,ntaski-1
1062 : endif ! mytidi==0
1063 : !
1064 : ! Scatter data back out to processors in my latitude row:
1065 : !
1066 7296 : call mp_scatterlons_f3d(pglblon,lev0,lev1,lon0,lon1,lat0-2,lat1+2,nf)
1067 : !
1068 : ! Finally, define halo points in data arrays from local global lon array,
1069 : ! changing sign if necessary (winds):
1070 : !
1071 7296 : if (lat0==1) then ! south
1072 15048 : do if=1,nf
1073 37848 : do j=lat0-2,lat0-1
1074 2998200 : do k=lev0,lev1
1075 38554800 : f(if)%ptr(k,lon0:lon1,j) = pglblon(if)%ptr(k,lon0:lon1,j)*polesign(if)
1076 : enddo
1077 : enddo
1078 : enddo
1079 : else ! north
1080 15048 : do if=1,nf
1081 37848 : do j=lat1+1,lat1+2
1082 2998200 : do k=lev0,lev1
1083 38554800 : f(if)%ptr(k,lon0:lon1,j) = pglblon(if)%ptr(k,lon0:lon1,j)*polesign(if)
1084 : enddo
1085 : enddo
1086 : enddo
1087 : endif
1088 :
1089 30096 : do if=1,nf
1090 30096 : deallocate(pglblon(if)%ptr)
1091 : enddo
1092 7296 : end subroutine mp_pole_halos
1093 : !-----------------------------------------------------------------------
1094 768 : subroutine conjugate_points(gmlat)
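!
! Determine, for each task, which mag latitudes must be exchanged with
! which other tasks so that mp_mag_foldhem can sum each northern point
! with its southern conjugate. Worked example (illustrative, assuming
! nmlat=97 so nmlath=49): north latitude j=60 has south conjugate
! js = 49-(60-49) = 38, and south latitude j=38 has north conjugate
! jn = 49+(49-38) = 60.
!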
1095 :
1096 : real(r8), intent(in) :: gmlat(:)
1097 : !
1098 : ! Local:
1099 : integer :: ier,j,js,jn,itask,jj
1100 : !
1101 : ! nsend_south(ntask): number of lats in south to send north
1102 : ! nrecv_north(ntask): number of lats in north to recv from south
1103 : !
1104 2304 : allocate(nsend_south(0:ntask-1),stat=ier)
1105 1536 : allocate(nrecv_north(0:ntask-1),stat=ier)
1106 : !
1107 : ! send_south_coords: south j lats to send north
1108 : ! recv_north_coords: north j lats to recv from south
1109 : !
1110 3072 : allocate(send_south_coords(mxmaglat,0:ntask-1),stat=ier)
1111 2304 : allocate(recv_north_coords(mxmaglat,0:ntask-1),stat=ier)
1112 :
1113 295680 : nsend_south(:) = 0
1114 295680 : nrecv_north(:) = 0
1115 1475328 : send_south_coords(:,:) = 0
1116 1475328 : recv_north_coords(:,:) = 0
1117 :
1118 3096 : magloop: do j=mlat0,mlat1
1119 : !
1120 : ! In north hem: find tasks w/ conjugate points in south to recv:
1121 : ! (nmlath is the mag equator index, set by mp_distribute_mag)
1122 3096 : if (gmlat(j) > 0._r8) then ! in north hem of current task
1123 1152 : js = nmlath-(j-nmlath) ! j index of south conjugate point (gmlat(js) = -gmlat(j))
1124 443520 : do itask=0,ntask-1
1125 1784448 : do jj = tasks(itask)%mlat0,tasks(itask)%mlat1
1126 : !
1127 : ! Receive these north coords from the south:
1128 1340928 : if (jj==js.and.mlon0==tasks(itask)%mlon0.and. &
1129 442368 : mlon1==tasks(itask)%mlon1) then
1130 1152 : nrecv_north(itask) = nrecv_north(itask)+1
1131 1152 : recv_north_coords(nrecv_north(itask),itask) = j
1132 : endif
1133 : enddo ! jj of remote task
1134 : enddo ! itask=0,ntask-1
1135 106848 : if (all(nrecv_north==0)) &
1136 0 : write(iulog,"(2a,i4,a,f8.2)") '>>> WARNING: could not find north conjugate',&
1137 0 : ' points corresponding to south latitude js=',js,' gmlat(js)=',gmlat(js)
1138 : !
1139 : ! In south hem: find tasks w/ conjugate points in north to send:
1140 1176 : elseif (gmlat(j) < 0._r8.and.j /= nmlath) then ! in south hem
1141 1152 : jn = nmlath+(nmlath-j) ! j index of north conjugate point
1142 443520 : do itask=0,ntask-1
1143 1784448 : do jj = tasks(itask)%mlat0,tasks(itask)%mlat1
1144 1340928 : if (jj==jn.and.mlon0==tasks(itask)%mlon0.and. &
1145 442368 : mlon1==tasks(itask)%mlon1) then
1146 1152 : nsend_south(itask) = nsend_south(itask)+1
1147 : ! Send these south coords to the north:
1148 1152 : send_south_coords(nsend_south(itask),itask) = j
1149 : endif
1150 : enddo ! jj of remote task
1151 : enddo ! itask=0,ntask-1
1152 332352 : if (all(nsend_south==0)) &
1153 0 : write(iulog,"(2a,i4,a,f8.2)") '>>> WARNING: could not find south conjugate',&
1154 0 : ' points corresponding to north latitude jn=',jn,' gmlat(jn)=',gmlat(jn)
1155 : endif ! in north or south hem
1156 : enddo magloop ! j=mlat0,mlat1
1157 768 : end subroutine conjugate_points
1158 : !-----------------------------------------------------------------------
1159 7296 : subroutine mp_mag_foldhem(f,mlon0,mlon1,mlat0,mlat1,nf)
1160 : !
1161 : ! For each point in northern hemisphere (if any) of the current task
1162 : ! subdomain, receive data from conjugate point in the south (from the
1163 : ! south task that owns it), and sum it to the north point data.
1164 : ! Do this for nf fields. Conjugate point indices to send/recv to/from
1165 : ! each task were determined by sub conjugate_points (this module).
1166 : ! nsend_south, ! number of south lats to send to north (each task)
1167 : ! nrecv_north ! number of north lats to recv from south (each task)
1168 : !
1169 : ! This routine is called from edynamo at every timestep.
1170 : ! Sub conjugate_points is called once per run, from mp_distribute.
1171 : !
1172 : ! Args:
1173 : integer,intent(in) :: mlon0,mlon1,mlat0,mlat1,nf
1174 : real(r8),intent(inout) :: f(mlon0:mlon1,mlat0:mlat1,nf)
1175 : !
1176 : ! Local:
1177 : integer :: j,n,len,itask,ifld,ier,nmlons
1178 14592 : real(r8) :: sndbuf(mxmaglon,mxmaglat,nf,0:ntask-1)
1179 14592 : real(r8) :: rcvbuf(mxmaglon,mxmaglat,nf,0:ntask-1)
1180 14592 : integer :: jsend(0:ntask-1),jrecv(0:ntask-1)
1181 : integer :: irstat(MPI_STATUS_SIZE) ! mpi receive status
1182 :
1183 : !
1184 1299986688 : sndbuf = 0._r8 ; rcvbuf = 0._r8
1185 5617920 : jsend = 0 ; jrecv = 0
1186 7296 : len = mxmaglon*mxmaglat*nf
1187 7296 : nmlons = mlon1-mlon0+1
1188 : !
1189 : ! Send south data to north itask:
1190 : ! (To avoid deadlock, do not send if north task is also myself. This will
1191 : ! happen when there is an odd number of tasks in the latitude dimension,
1192 : ! e.g., ntask == 12, 30, etc)
1193 : !
1194 2808960 : do itask=0,ntask-1
1195 :
1198 2816028 : if (nsend_south(itask) > 0 .and. itask /= mytid) then
1199 56544 : do ifld = 1,nf
1200 133152 : do n=1,nsend_south(itask)
1201 229824 : sndbuf(1:nmlons,n,ifld,itask) = &
1202 873012 : f(:,send_south_coords(n,itask),ifld)
1203 : enddo
1204 : enddo ! ifld=1,nf
1205 7068 : call mpi_isend(sndbuf(1,1,1,itask),len,MPI_REAL8, &
1206 7068 : itask,1,mpi_comm_edyn,jsend(itask),ier)
1207 7068 : call mpi_wait(jsend(itask),irstat,ier)
1208 : endif ! nsend_south(itask) > 0
1209 : enddo ! itask=0,ntask-1
1210 : !
1211 : ! Receive north data from south itask and add to north,
1212 : ! i.e., north = north+south. (do not receive if south task is
1213 : ! also myself, but do add south data to my north points, see below)
1214 : !
1215 2808960 : do itask=0,ntask-1
1216 2808960 : if (nrecv_north(itask) > 0 .and. itask /= mytid) then
1217 7068 : call mpi_irecv(rcvbuf(1,1,1,itask),len,MPI_REAL8, &
1218 7068 : itask,1,mpi_comm_edyn,jrecv(itask),ier)
1219 7068 : call mpi_wait(jrecv(itask),irstat,ier)
1220 56544 : do ifld=1,nf
1221 133152 : do n=1,nrecv_north(itask)
1222 : !
1223 : ! Receive lats in reverse order:
1224 : f(mlon0:mlon1, &
1225 : recv_north_coords(nrecv_north(itask)-n+1,itask),ifld) = &
1226 153216 : f(mlon0:mlon1, &
1227 0 : recv_north_coords(nrecv_north(itask)-n+1,itask),ifld) + &
1228 796404 : rcvbuf(1:nmlons,n,ifld,itask)
1229 : enddo ! n=1,nrecv_north(itask)
1230 : enddo ! ifld=1,nf
1231 : !
1232 : ! If I am send *and* receive task, simply add my south data to my north points:
1233 2794596 : elseif (nrecv_north(itask) > 0 .and. itask == mytid) then
1234 0 : do ifld=1,nf
1235 0 : do n=1,nrecv_north(itask)
1236 : f(mlon0:mlon1, &
1237 : recv_north_coords(nrecv_north(itask)-n+1,itask),ifld) = &
1238 0 : f(mlon0:mlon1, &
1239 0 : recv_north_coords(nrecv_north(itask)-n+1,itask),ifld) + &
1240 0 : f(mlon0:mlon1,send_south_coords(n,itask),ifld)
1241 : enddo ! n=1,nrecv_north(itask)
1242 : enddo ! ifld=1,nf
1243 : endif ! nrecv_north(itask) > 0
1244 : enddo ! itask=0,ntask-1
1245 : !
1246 : ! Mag equator is also "folded", but not included in conjugate points,
1247 : ! so double it here:
1248 29412 : do j=mlat0,mlat1
1249 29412 : if (j==nmlath) then
1250 1824 : do ifld=1,nf
1251 12597 : f(:,j,ifld) = f(:,j,ifld)+f(:,j,ifld)
1252 : enddo
1253 : endif
1254 : enddo
1255 :
1256 7296 : end subroutine mp_mag_foldhem
1257 : !-----------------------------------------------------------------------
1258 955776 : subroutine mp_mag_periodic_f2d(f,mlon0,mlon1,mlat0,mlat1,nf)
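!
! Define the periodic point in mag longitude for nf 2d fields: copy
! values at the first longitude into index nmlonp1 on the task(s)
! owning the eastern edge. With more than one task in longitude this
! requires a west-to-east transfer; otherwise it is a local copy.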
1259 : !
1260 : ! Args:
1261 : integer,intent(in) :: mlon0,mlon1,mlat0,mlat1,nf
1262 : real(r8),intent(inout) :: f(mlon0:mlon1,mlat0:mlat1,nf)
1263 : !
1264 : ! Local:
1265 : integer :: j,ier,idest,isrc,len,ireqsend,ireqrecv,msgtag
1266 1911552 : real(r8) :: sndbuf(mxmaglat,nf),rcvbuf(mxmaglat,nf)
1267 : integer :: irstat(MPI_STATUS_SIZE) ! mpi receive status
1268 :
1269 955776 : if (ntaski>1) then
1270 955776 : len = mxmaglat*nf
1271 : !
1272 : ! I am a western-most task. Send lon 1 to eastern-most tasks:
1273 955776 : if (mytidi==0) then
1274 79648 : idest = itask_table_mag(ntaski-1,mytidj)
1275 321081 : do j=mlat0,mlat1
1276 573572 : sndbuf(j-mlat0+1,:) = f(1,j,:)
1277 : enddo
1278 79648 : msgtag = mytid
1279 79648 : call mpi_isend(sndbuf,len,MPI_REAL8,idest,msgtag,mpi_comm_edyn, ireqsend,ier)
1280 79648 : if (ier /= 0) call handle_mpi_err(ier,'mp_mag_periodic_f2d send to idest')
1281 79648 : call mpi_wait(ireqsend,irstat,ier)
1282 79648 : if (ier /= 0) call handle_mpi_err(ier,'mp_mag_periodic_f2d wait for send')
1283 : !
1284 : ! I am eastern-most task. Receive lon 1 from western-most tasks,
1285 : ! and assign to nmlonp1:
1286 876128 : elseif (mytidi==ntaski-1) then
1287 79648 : isrc = itask_table_mag(0,mytidj)
1288 79648 : msgtag = isrc
1289 79648 : call mpi_irecv(rcvbuf,len,MPI_REAL8,isrc,msgtag,mpi_comm_edyn, ireqrecv,ier)
1290 79648 : if (ier /= 0) call handle_mpi_err(ier,'mp_mag_periodic_f2d recv from isrc')
1291 79648 : call mpi_wait(ireqrecv,irstat,ier)
1292 79648 : if (ier /= 0) call handle_mpi_err(ier,'mp_mag_periodic_f2d wait for recv')
1293 :
1294 321081 : do j=mlat0,mlat1
1295 573572 : f(nmlonp1,j,:) = rcvbuf(j-mlat0+1,:)
1296 : enddo
1297 : endif ! mytidi == 0 or ntaski-1
1298 : else
1299 0 : do j=mlat0,mlat1
1300 0 : f(nmlonp1,j,:) = f(1,j,:)
1301 : enddo
1302 : endif
1303 :
1304 955776 : end subroutine mp_mag_periodic_f2d
1305 : !-----------------------------------------------------------------------
1306 43776 : subroutine mp_mag_halos(fmsub,mlon0,mlon1,mlat0,mlat1,nf)
1307 : !
1308 : ! Exchange halo/ghost points between magnetic grid subdomains for nf fields.
1309 : ! Only a single halo point is required in both lon and lat dimensions.
1310 : ! Note that all tasks in any row of the task matrix have the same
1311 : ! mlat0,mlat1, and that all tasks in any column of the task matrix
1312 : ! have the same mlon0,mlon1.
1313 : ! Longitude halos are done first, exchanging mlat0:mlat1, then latitude
1314 : ! halos are done, exchanging mlon0-1:mlon1+1 (i.e., including the
1315 : ! longitude halos that were defined first).
1316 : !
1317 : ! Args:
1318 : integer,intent(in) :: mlon0,mlon1,mlat0,mlat1,nf
1319 : real(r8),intent(inout) :: fmsub(mlon0-1:mlon1+1,mlat0-1:mlat1+1,nf)
1320 : !
1321 : ! Local:
1322 : integer :: ifld,west,east,north,south,len,isend0,isend1, &
1323 : irecv0,irecv1,ier,nmlats,istat(MPI_STATUS_SIZE,4),ireq(4),nmlons
1324 87552 : real(r8),dimension(mlat1-mlat0+1,nf)::sndlon0,sndlon1,rcvlon0,rcvlon1
1325 : real(r8),dimension((mlon1+1)-(mlon0-1)+1,nf) :: &
1326 43776 : sndlat0,sndlat1,rcvlat0,rcvlat1
1327 :
1328 : !
1329 : ! Init send/recv buffers for lon halos:
1330 8205264 : sndlon0 = 0._r8 ; rcvlon0 = 0._r8
1331 8205264 : sndlon1 = 0._r8 ; rcvlon1 = 0._r8
1332 : !
1333 : ! Identify east and west neighbors:
1334 43776 : west = itask_table_mag(mytidi-1,mytidj)
1335 43776 : east = itask_table_mag(mytidi+1,mytidj)
1336 : !
1337 : ! Exchange mlat0:mlat1 (lat halos are not yet defined):
1338 43776 : nmlats = mlat1-mlat0+1
1339 43776 : len = nmlats*nf
1340 : !
1341 : ! Send mlon0 to the west neighbor, and mlon1 to the east.
1342 : ! However, tasks are periodic in longitude (see itask_table_mag),
1343 : ! and far west tasks send mlon0+1, and far east tasks send mlon1-1
1344 : !
1345 1050624 : do ifld=1,nf
1346 : ! Far west tasks send mlon0+1 to far east (periodic) tasks:
1347 1006848 : if (mytidi==0) then
1348 338238 : sndlon0(:,ifld) = fmsub(mlon0+1,mlat0:mlat1,ifld)
1349 : ! Interior tasks send mlon0 to west neighbor:
1350 : else
1351 3720618 : sndlon0(:,ifld) = fmsub(mlon0,mlat0:mlat1,ifld)
1352 : endif
1353 :
1354 : ! Far east tasks send mlon1-1 to far west (periodic) tasks:
1355 1050624 : if (mytidi==nmagtaski-1) then
1356 338238 : sndlon1(:,ifld) = fmsub(mlon1-1,mlat0:mlat1,ifld)
1357 : ! Interior tasks send mlon1 to east neighbor:
1358 : else
1359 3720618 : sndlon1(:,ifld) = fmsub(mlon1,mlat0:mlat1,ifld)
1360 : endif
1361 : enddo ! ifld=1,nf
1362 : !
1363 : ! Send mlon0 to the west:
1364 43776 : call mpi_isend(sndlon0,len,MPI_REAL8,west,1,mpi_comm_edyn,isend0,ier)
1365 43776 : if (ier /= 0) call handle_mpi_err(ier,'mp_mag_halos send mlon0 to west')
1366 : !
1367 : ! Send mlon1 to the east:
1368 43776 : call mpi_isend(sndlon1,len,MPI_REAL8,east,1,mpi_comm_edyn,isend1,ier)
1369 43776 : if (ier /= 0) call handle_mpi_err(ier,'mp_mag_halos send mlon1 to east')
1370 : !
1371 : ! Recv mlon0-1 from west:
1372 43776 : call mpi_irecv(rcvlon0,len,MPI_REAL8,west,1,mpi_comm_edyn,irecv0,ier)
1373 43776 : if (ier /= 0) call handle_mpi_err(ier,'mp_mag_halos recv mlon0 from west')
1374 : !
1375 : ! Recv mlon1+1 from east:
1376 43776 : call mpi_irecv(rcvlon1,len,MPI_REAL8,east,1,mpi_comm_edyn,irecv1,ier)
1377 43776 : if (ier /= 0) call handle_mpi_err(ier,'mp_mag_halos recv mlon1 from east')
1378 : !
1379 : ! Wait for completions:
1380 218880 : ireq = (/isend0,isend1,irecv0,irecv1/)
1381 43776 : istat = 0
1382 43776 : call mpi_waitall(4,ireq,istat,ier)
1383 43776 : if (ier /= 0) call handle_mpi_err(ier,'mp_mag_halos waitall for lons')
1384 : !
1385 : ! Copy mlon0-1 from rcvlon0, and mlon1+1 from rcvlon1:
1386 1050624 : do ifld=1,nf
1387 4058856 : fmsub(mlon0-1,mlat0:mlat1,ifld) = rcvlon0(:,ifld)
1388 4058856 : fmsub(mlon1+1,mlat0:mlat1,ifld) = rcvlon1(:,ifld)
1389 : !
1390 : ! Fix special case of 2 tasks in longitude dimension:
1391 1050624 : if (east == west) then
1392 0 : fmsub(mlon0-1,mlat0:mlat1,ifld) = rcvlon1(:,ifld)
1393 0 : fmsub(mlon1+1,mlat0:mlat1,ifld) = rcvlon0(:,ifld)
1394 : endif
1395 : enddo ! ifld=1,nf
1396 : !
1397 : ! Now exchange latitudes:
1398 19721088 : sndlat0 = 0._r8 ; rcvlat0 = 0._r8
1399 19721088 : sndlat1 = 0._r8 ; rcvlat1 = 0._r8
1400 :
1401 43776 : south = itask_table_mag(mytidi,mytidj-1) ! neighbor to south
1402 43776 : north = itask_table_mag(mytidi,mytidj+1) ! neighbor to north
1403 : !
1404 : ! Include halo longitudes that were defined by the exchanges above:
1405 43776 : nmlons = (mlon1+1)-(mlon0-1)+1
1406 43776 : len = nmlons*nf
1407 : !
1408 : ! Send mlat0 to south neighbor, and mlat1 to north:
1409 1050624 : do ifld=1,nf
1410 9816768 : sndlat0(:,ifld) = fmsub(:,mlat0,ifld)
1411 9860544 : sndlat1(:,ifld) = fmsub(:,mlat1,ifld)
1412 : enddo
1413 : !
1414 : ! Send mlat0 to south:
1415 43776 : call mpi_isend(sndlat0,len,MPI_REAL8,south,1,mpi_comm_edyn,isend0,ier)
1416 43776 : if (ier /= 0) call handle_mpi_err(ier,'mp_mag_halos send mlat0 to south')
1417 : !
1418 : ! Send mlat1 to north:
1419 43776 : call mpi_isend(sndlat1,len,MPI_REAL8,north,1,mpi_comm_edyn,isend1,ier)
1420 43776 : if (ier /= 0) call handle_mpi_err(ier,'mp_mag_halos send mlat1 to north')
1421 : !
1422 : ! Recv mlat0-1 from south:
1423 43776 : call mpi_irecv(rcvlat0,len,MPI_REAL8,south,1,mpi_comm_edyn,irecv0,ier)
1424 43776 : if (ier /= 0) call handle_mpi_err(ier,'mp_mag_halos recv mlat0-1 from south')
1425 : !
1426 : ! Recv mlat1+1 from north:
1427 43776 : call mpi_irecv(rcvlat1,len,MPI_REAL8,north,1,mpi_comm_edyn,irecv1,ier)
1428 43776 : if (ier /= 0) call handle_mpi_err(ier,'mp_mag_halos recv mlat1+1 from north')
1429 : !
1430 : ! Wait for completions:
1431 218880 : ireq = (/isend0,isend1,irecv0,irecv1/)
1432 43776 : istat = 0
1433 43776 : call mpi_waitall(4,ireq,istat,ier)
1434 43776 : if (ier /= 0) call handle_mpi_err(ier,'mp_mag_halos waitall for lats')
1435 : !
1436 : ! Copy mlat0-1 from rcvlat0, and mlat1+1 from rcvlat1:
1437 1050624 : do ifld=1,nf
1438 9816768 : fmsub(:,mlat0-1,ifld) = rcvlat0(:,ifld)
1439 9860544 : fmsub(:,mlat1+1,ifld) = rcvlat1(:,ifld)
1440 : enddo ! ifld=1,nf
1441 :
1442 43776 : end subroutine mp_mag_halos
1443 : !-----------------------------------------------------------------------
1444 116736 : subroutine mp_geo_halos(fmsub,lev0,lev1,lon0,lon1,lat0,lat1,nf)
1445 : !
1446 : ! Exchange halo/ghost points between geographic grid subdomains for nf fields.
1447 : ! Two halo points are set in both lon and lat dimensions.
1448 : ! Longitude halos are done first, then latitude halos are done (including
1449 : ! the longitude halos that were defined first).
1450 : !
1451 : ! Args:
1452 : integer,intent(in) :: lev0,lev1,lon0,lon1,lat0,lat1,nf
1453 : type(array_ptr_type) :: fmsub(nf) ! (lev0:lev1,lon0-2:lon1+2,lat0-2:lat1+2)
1454 :
1455 : !
1456 : ! Local:
1457 : integer :: k,i,ifld,west,east,north,south,len,isend0,isend1, &
1458 : irecv0,irecv1,ier,nlats,istat(MPI_STATUS_SIZE,4),ireq(4),nlons
1459 : real(r8),dimension(lev0:lev1,2,lat1-lat0+1,nf) :: &
1460 233472 : sndlon0,sndlon1,rcvlon0,rcvlon1
1461 : real(r8),dimension(lev0:lev1,2,(lon1+2)-(lon0-2)+1,nf) :: &
1462 233472 : sndlat0,sndlat1,rcvlat0,rcvlat1
1463 :
1464 : ! if (mpi_timing) starttime = mpi_wtime()
1465 : !
1466 : ! Init send/recv buffers for lon halos:
1467 576617472 : sndlon0 = 0._r8 ; rcvlon0 = 0._r8
1468 576617472 : sndlon1 = 0._r8 ; rcvlon1 = 0._r8
1469 : !
1470 : ! Identify east and west neighbors:
1471 116736 : west = itask_table_geo(mytidi-1,mytidj)
1472 116736 : east = itask_table_geo(mytidi+1,mytidj)
1473 : !
1474 : ! Exchange lat0:lat1 (lat halos are not yet defined):
1475 116736 : nlats = lat1-lat0+1
1476 116736 : len = (lev1-lev0+1)*2*nlats*nf
1477 : !
1478 : ! Send lon0:lon0+1 to the west neighbor, and lon1-1:lon1 to the east.
1479 : !
1480 481536 : do ifld=1,nf
1481 1211136 : do i=1,2
1482 95942400 : do k=lev0,lev1
1483 379392000 : sndlon0(k,i,:,ifld) = fmsub(ifld)%ptr(k,lon0+i-1,lat0:lat1) ! lon0, lon0+1
1484 380121600 : sndlon1(k,i,:,ifld) = fmsub(ifld)%ptr(k,lon1+i-2,lat0:lat1) ! lon1-1, lon1
1485 : enddo
1486 : enddo
1487 : enddo ! ifld=1,nf
1488 : !
1489 : ! Send lon0:lon0+1 to the west:
1490 116736 : call mpi_isend(sndlon0,len,MPI_REAL8,west,1,mpi_comm_edyn,isend0,ier)
1491 116736 : if (ier /= 0) call handle_mpi_err(ier, &
1492 0 : 'mp_geo_halos send lon0:lon0+1 to west')
1493 : !
1494 : ! Send lon1-1:lon1 to the east:
1495 116736 : call mpi_isend(sndlon1,len,MPI_REAL8,east,1,mpi_comm_edyn,isend1,ier)
1496 116736 : if (ier /= 0) call handle_mpi_err(ier, &
1497 0 : 'mp_geo_halos send lon1-1:lon1 to east')
1498 : !
1499 : ! Recv lon0-2:lon0-1 from west:
1500 116736 : call mpi_irecv(rcvlon0,len,MPI_REAL8,west,1,mpi_comm_edyn,irecv0,ier)
1501 116736 : if (ier /= 0) call handle_mpi_err(ier, &
1502 0 : 'mp_geo_halos recv lon0-2:lon0-1 from west')
1503 : !
1504 : ! Recv lon1+1:lon1+2 from east:
1505 116736 : call mpi_irecv(rcvlon1,len,MPI_REAL8,east,1,mpi_comm_edyn,irecv1,ier)
1506 116736 : if (ier /= 0) call handle_mpi_err(ier, &
1507 0 : 'mp_geo_halos recv lon1+1:lon1+2 from east')
1508 : !
1509 : ! Wait for completions:
1510 583680 : ireq = (/isend0,isend1,irecv0,irecv1/)
1511 116736 : istat = 0
1512 116736 : call mpi_waitall(4,ireq,istat,ier)
1513 116736 : if (ier /= 0) call handle_mpi_err(ier, &
1514 0 : 'mp_geo_halos waitall for lons')
1515 : !
1516 : ! Copy lon0-2:lon0-1 from rcvlon0, and lon1+1:lon1+2 from rcvlon1:
1517 481536 : do ifld=1,nf
1518 481536 : if (east /= west) then
1519 1094400 : do i=1,2
1520 95942400 : do k=lev0,lev1
1521 379392000 : fmsub(ifld)%ptr(k,lon0-3+i,lat0:lat1) = rcvlon0(k,i,:,ifld) ! lon0-2, lon0-1
1522 380121600 : fmsub(ifld)%ptr(k,lon1+i ,lat0:lat1) = rcvlon1(k,i,:,ifld) ! lon1+1, lon1+2
1523 : enddo
1524 : enddo ! i=1,2
1525             : !
1526             : ! Fix the special case of 2 tasks in the longitude dimension: east and
                 : ! west are then the same neighbor task, so the received buffers are
                 : ! applied swapped:
1527 : else ! east==west
1528 0 : do i=1,2
1529 0 : do k=lev0,lev1
1530 0 : fmsub(ifld)%ptr(k,lon0-3+i,lat0:lat1) = rcvlon1(k,i,:,ifld) ! lon0-2, lon0-1
1531 0 : fmsub(ifld)%ptr(k,lon1+i ,lat0:lat1) = rcvlon0(k,i,:,ifld) ! lon1+1, lon1+2
1532 : enddo
1533 : enddo
1534 : endif ! east==west
1535 : enddo ! ifld=1,nf
1536 : !
1537 : ! Now exchange latitudes:
1538 3071119872 : sndlat0 = 0._r8 ; rcvlat0 = 0._r8
1539 3071119872 : sndlat1 = 0._r8 ; rcvlat1 = 0._r8
1540 :
1541 116736 : south = itask_table_geo(mytidi,mytidj-1) ! neighbor to south
1542 116736 : north = itask_table_geo(mytidi,mytidj+1) ! neighbor to north
1543 : !
1544 : ! Include halo longitudes that were defined by the exchanges above:
1545 116736 : nlons = (lon1+2)-(lon0-2)+1
1546 116736 : len = (lev1-lev0+1)*2*nlons*nf
1547 : !
1548 : ! Send lat0:lat0+1 to south neighbor, and lat1-1:lat1 to north:
1549 481536 : do ifld=1,nf
1550 47905536 : do k=lev0,lev1
1551 806208000 : sndlat0(k,1,:,ifld) = fmsub(ifld)%ptr(k,:,lat0 ) ! send lat0 to south
1552 806208000 : sndlat0(k,2,:,ifld) = fmsub(ifld)%ptr(k,:,lat0+1) ! send lat0+1 to south
1553 :
1554 806208000 : sndlat1(k,1,:,ifld) = fmsub(ifld)%ptr(k,:,lat1 ) ! send lat1 to north
1555 806572800 : sndlat1(k,2,:,ifld) = fmsub(ifld)%ptr(k,:,lat1-1) ! send lat1-1 to north
1556 : enddo
1557 : enddo
1558 : !
1559 : ! Send lat0:lat0+1 to south (matching recv is lat1+1:lat1+2):
1560 116736 : call mpi_isend(sndlat0,len,MPI_REAL8,south,100,mpi_comm_edyn,isend0,ier)
1561 116736 : if (ier /= 0) call handle_mpi_err(ier, &
1562 0 : 'mp_geo_halos send lat0:lat0+1 to south')
1563 : !
1564 : ! Send lat1-1:lat1 to north (matching recv is lat0-2:lat0-1):
1565 116736 : call mpi_isend(sndlat1,len,MPI_REAL8,north,101,mpi_comm_edyn,isend1,ier)
1566 116736 : if (ier /= 0) call handle_mpi_err(ier, &
1567 0 : 'mp_geo_halos send lat1-1:lat1 to north')
1568 : !
1569 : ! Recv lat0-2:lat0-1 from south:
1570 116736 : call mpi_irecv(rcvlat0,len,MPI_REAL8,south,101,mpi_comm_edyn,irecv0,ier)
1571 116736 : if (ier /= 0) call handle_mpi_err(ier, &
1572 0 : 'mp_geo_halos recv lat0-2:lat0-1 from south')
1573 : !
1574 : ! Recv lat1+1:lat1+2 from north:
1575 116736 : call mpi_irecv(rcvlat1,len,MPI_REAL8,north,100,mpi_comm_edyn,irecv1,ier)
1576 116736 : if (ier /= 0) call handle_mpi_err(ier, &
1577 0 : 'mp_geo_halos recv lat1+1:lat1+2 from north')
1578 : !
1579 : ! Wait for completions:
1580 583680 : ireq = (/isend0,isend1,irecv0,irecv1/)
1581 116736 : istat = 0
1582 116736 : call mpi_waitall(4,ireq,istat,ier)
1583 116736 : if (ier /= 0) call handle_mpi_err(ier, &
1584 0 : 'mp_geo_halos waitall for lats')
1585 : !
1586 : ! Copy lat0-2:lat0-1 from rcvlat0, and lat1+1:lat1+2 from rcvlat1:
1587 481536 : do ifld=1,nf
1588 47788800 : do k=lev0,lev1
1589 806208000 : fmsub(ifld)%ptr(k,:,lat0-1) = rcvlat0(k,1,:,ifld) ! recv lat0-1 from south
1590 806208000 : fmsub(ifld)%ptr(k,:,lat0-2) = rcvlat0(k,2,:,ifld) ! recv lat0-2 from south
1591 :
1592 806208000 : fmsub(ifld)%ptr(k,:,lat1+1) = rcvlat1(k,1,:,ifld) ! recv lat1+1 from north
1593 806572800 : fmsub(ifld)%ptr(k,:,lat1+2) = rcvlat1(k,2,:,ifld) ! recv lat1+2 from north
1594 : enddo
1595 : !
1596             : ! The special case of 2 tasks in the latitude dimension (north==south)
1597             : ! is not handled here; it is not expected to occur in WACCM, so abort
                 : ! if it does:
1598 : !
1599 481536 : if (north == south) then
1600 0 : call endrun('mp_geo_halos: north==south')
1601 : endif
1602 : enddo ! ifld=1,nf
1603 :
1604 116736 : end subroutine mp_geo_halos
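                 : !-----------------------------------------------------------------------
                 : ! Worked example of the two-deep halo indexing above, with invented
                 : ! subdomain bounds: for a task owning lon0=5..lon1=8,
                 : !   send lons 5:6 west  (they become that task's lon1+1:lon1+2 halo)
                 : !   send lons 7:8 east  (they become that task's lon0-2:lon0-1 halo)
                 : !   recv lons 3:4 into lon0-2:lon0-1 from the west neighbor
                 : !   recv lons 9:10 into lon1+1:lon1+2 from the east neighbor
                 : ! so after the longitude exchange the task holds global lons 3:10.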
1605 : !-----------------------------------------------------------------------
1606 7296 : subroutine mp_gather_edyn(fmsub,mlon0,mlon1,mlat0,mlat1,fmglb,nmlonp1,nmlat,nf)
1607 : !
1608 : ! Gather fields on mag subdomains to root task, so root task can
1609 : ! complete non-parallel portion of dynamo (starting after rhspde)
1610 : !
1611 : ! Args:
1612 : integer,intent(in) :: mlon0,mlon1,mlat0,mlat1,nmlonp1,nmlat,nf
1613 : real(r8),intent(in) :: fmsub(mlon0:mlon1,mlat0:mlat1,nf)
1614 : real(r8),intent(out) :: fmglb(nmlonp1,nmlat,nf)
1615 : !
1616 : ! Local:
1617 : integer :: len,i,j,ifld,ier
1618 7296 : real(r8),dimension(nmlonp1,nmlat,nf) :: sndbuf
1619 :
1620 406285056 : sndbuf = 0._r8
1621 406285056 : fmglb = 0._r8
1622 :
1623 7296 : len = nmlonp1*nmlat*nf
1624 : !
1625 : ! Load send buffer with my subdomain:
1626 58368 : do ifld=1,nf
1627 213180 : do j=mlat0,mlat1
1628 1250865 : do i=mlon0, mlon1
1629 1199793 : sndbuf(i,j,ifld) = fmsub(i,j,ifld)
1630 : enddo
1631 : enddo
1632 : enddo
1633 :
1634 : !
1635 : ! Gather to root by using scalable reduce method:
1636 :
1637 7296 : call mpi_reduce(sndbuf, fmglb, len, MPI_REAL8, MPI_SUM, 0, mpi_comm_edyn, ier )
1638        7296 : if (ier /= 0) call handle_mpi_err(ier,'mp_gather_edyn: mpi_reduce to root')
1639 :
1640 7296 : end subroutine mp_gather_edyn
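                 : !-----------------------------------------------------------------------
                 : ! Why the mpi_reduce above acts as a gather: the mag subdomains tile
                 : ! the global (nmlonp1,nmlat) grid without overlap, so for each global
                 : ! point (i,j) exactly one task loads a nonzero value into sndbuf and
                 : ! every other task contributes the zero it was initialized with. The
                 : ! element-wise MPI_SUM across tasks therefore reassembles the global
                 : ! field on the root:
                 : !   fmglb(i,j,ifld) = sum over tasks of sndbuf(i,j,ifld)
                 : !                   = fmsub(i,j,ifld) from the owning task.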
1641 : !-----------------------------------------------------------------------
1642 7296 : subroutine mp_scatter_phim(phim_glb,phim)
1643 : real(r8),intent(in) :: phim_glb(nmlonp1,nmlat)
1644 : real(r8),intent(out) :: phim(mlon0:mlon1,mlat0:mlat1)
1645 : !
1646 : ! Local:
1647 : integer :: ier,len,i,j
1648 :
1649 : ! if (mpi_timing) starttime = mpi_wtime()
1650 : !
1651 : ! Broadcast global phim (from pdynamo phim(nmlonp1,nmlat)):
1652 7296 : len = nmlat*nmlonp1
1653 7296 : call mpi_bcast(phim_glb,len,MPI_REAL8,0,mpi_comm_edyn,ier)
1654 7296 : if (ier /= 0) &
1655 0 : call handle_mpi_err(ier,'mp_scatter_phim: bcast global phim')
1656 : !
1657 : ! Define subdomains:
1658 29412 : do j=mlat0,mlat1
1659 178695 : do i=mlon0,mlon1
1660 171399 : phim(i,j) = phim_glb(i,j)
1661 : enddo
1662 : enddo
1663 :
1664 7296 : end subroutine mp_scatter_phim
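                 : !-----------------------------------------------------------------------
                 : ! Design note: despite its name, mp_scatter_phim broadcasts the full
                 : ! (nmlonp1,nmlat) array and lets each task copy out its own subdomain.
                 : ! For a single 2d field this is simpler than the derived datatypes an
                 : ! mpi_scatterv of 2d blocks would need, and phim is small enough that
                 : ! the redundant traffic is minor.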
1665 : !-----------------------------------------------------------------------
1666 7296 : subroutine mp_mag_jslot(fin,mlon00,mlon11,mlat00,mlat11, &
1667 7296 : fout,jneed,mxneed,nf)
1668 : !
1669             : ! The current task needs to receive (from other tasks) field fin at the
1670             : ! latitude indices listed in jneed (entries /= -1), at all longitudes of
1671             : ! the current subdomain. Subdomain bounds here include halo points
1672             : ! (mlon00=mlon0-1, mlon11=mlon1+1, etc.), and fin likewise carries halo
                 : ! points, since the latitude data is needed at the halo longitudes too.
1673 : !
1674 : ! Args:
1675 : integer,intent(in) :: mlon00,mlon11,mlat00,mlat11 ! subdomains w/ halos
1676 : integer,intent(in) :: nf ! number of fields
1677 : integer,intent(in) :: mxneed ! max number of needed lats (nmlat+2)
1678 : integer,intent(in) :: jneed(mxneed) ! j-indices of needed lats (where /= -1)
1679 : real(r8),intent(in) :: fin(mlon00:mlon11,mlat00:mlat11,nf) ! data at current subdomain
1680 : real(r8),intent(out) :: fout(mlon00:mlon11,mxneed,nf) ! returned data at needed lats
1681 : !
1682 : ! Local:
1683 : integer,parameter :: sndbuf_cntr_max = 40 ! Maximum number of ibsend from one mpi task
1684             : integer :: ier,njneed,i,j,n,nj,idest, &
1685             : icount,len,nlons,isrc,msgid,ifld,sndbuf_cntr,bufsize
1686 : integer :: tij ! rank in cols_comm (0 to nmagtaskj-1)
1687 14592 : integer :: jhave(mxneed),njhave,wid
1688 14592 : integer :: peersneed(mxneed,0:nmagtaskj-1)
1689 14592 : integer :: jneedall (mxneed,0:nmagtaskj-1)
1690 14592 : real(r8) :: sndbuf(mxmaglon+2,mxneed,nf,sndbuf_cntr_max)
1691 14592 : real(r8) :: rcvbuf(mxmaglon+2,mxneed,nf)
1692 14592 : real(r8) :: buffer((mxmaglon+2)*mxneed*nf*sndbuf_cntr_max)
1693 : integer :: irstat(MPI_STATUS_SIZE) ! mpi receive status
1694 : integer :: isstat(MPI_STATUS_SIZE,sndbuf_cntr_max) !mpi_ibsend wait status
1695 : integer :: ibsend_requests(sndbuf_cntr_max) !array of ibsend requests
1696 :
1697 1446366336 : sndbuf = 0._r8
1698 36158976 : rcvbuf = 0._r8
1699 7296 : njneed = 0
1700 7296 : ibsend_requests = 0
1701 7296 : sndbuf_cntr = 0
1702 729600 : do j=1,mxneed
1703 729600 : if (jneed(j) /= -1) njneed=njneed+1
1704 : enddo
1705       45790 : if (any(jneed(1:njneed)==-1)) call endrun('mp_mag_jslot: -1 found among first njneed entries of jneed')
1706 : !
1707 7296 : call MPI_Comm_rank(cols_comm,tij,ier)
                 : ! MPI_buffer_attach takes its size argument in bytes (8 bytes per r8 word):
1708        7296 : call MPI_buffer_attach(buffer,8*(mxmaglon+2)*mxneed*nf*sndbuf_cntr_max,ier)
1709 7296 : if (ier /= 0) &
1710 0 : call handle_mpi_err(ier,'mp_mag_jslot call mpi_buffer_attach')
1711 :
1712 : !
1713 : ! Send needed lat indices to all tasks in my column:
1714 : ! (redundant for alltoall)
1715 240768 : do n=0,nmagtaskj-1
1716 23354496 : jneedall(:,n) = jneed(:)
1717 : enddo
1718 :
1719 : call mpi_alltoall(jneedall,mxneed,MPI_INTEGER, &
1720 7296 : peersneed,mxneed,MPI_INTEGER,cols_comm,ier)
1721 7296 : if (ier /= 0) &
1722 0 : call handle_mpi_err(ier,'mp_mag_jslot call mpi_alltoall')
1723 : !
1724 : ! Check if I have any needed lats, and who to send to:
1725 240768 : do n=0,nmagtaskj-1
1726 233472 : if (n==tij) cycle
1727 : njhave = 0
1728 22617600 : do j=1,mxneed
1729 22617600 : if (peersneed(j,n) >= mlat00.and.peersneed(j,n) <= mlat11)then
1730 50167 : njhave = njhave+1
1731 50167 : jhave(njhave) = peersneed(j,n)
1732 50167 : idest = n
1733 50167 : wid = itask_table_geo(mytidi,idest)
1734 : endif
1735 : enddo
1736 233472 : if (njhave > 0) then
1737 :
1738 14406 : sndbuf_cntr = sndbuf_cntr + 1
1739 14406 : if (sndbuf_cntr > sndbuf_cntr_max) call endrun('sndbuf_cntr exceeded sndbuf_cntr_max')
1740 :
1741 : !
1742 : ! Load send buffer:
1743 14406 : nlons = mlon11-mlon00+1
1744 86436 : do ifld=1,nf
1745 337271 : do j=1,njhave
1746 2519930 : do i=mlon00,mlon11
1747 2447900 : sndbuf(i-mlon00+1,j,ifld,sndbuf_cntr) = fin(i,jhave(j),ifld)
1748 : enddo
1749 : enddo
1750 : enddo
1751 14406 : len = nlons*njhave*nf
1752 14406 : msgid = mytid+wid*10000
1753 57624 : call mpi_ibsend(sndbuf(1:nlons,1:njhave,:,sndbuf_cntr),len,MPI_REAL8, &
1754 5111890 : idest,msgid,cols_comm,ibsend_requests(sndbuf_cntr),ier)
1755 14406 : if (ier /= 0) &
1756 0 : call handle_mpi_err(ier,'mp_mag_jslot call mpi_ibsend')
1757 : endif
1758 : enddo ! n=0,nmagtaskj-1
1759 :
1760 7296 : call MPI_waitall(sndbuf_cntr,ibsend_requests,isstat,ier)
1761 7296 : if (ier /= 0) &
1762 0 : call handle_mpi_err(ier,'mp_mag_jslot call mpi_waitall')
                 : ! The detach size argument is returned by MPI, so pass a variable:
1763        7296 : call MPI_buffer_detach(buffer,bufsize,ier)
1764 7296 : if (ier /= 0) &
1765 0 : call handle_mpi_err(ier,'mp_mag_jslot call mpi_buffer_detach')
1766 :
1767 : !
1768 : ! Determine which tasks to receive which lats from. Task to
1769 : ! receive from must be in same task column magtidi as I am.
1770 7296 : if (njneed > 0) then
1771 398800 : njhave = 0
1772 398800 : jhave(:) = -1
1773 1535380 : do n=0,ntask-1
1774 : njhave = 0
1775 13511424 : do j=1,njneed
1776 11980032 : if (jneed(j) >= tasks(n)%mlat0-1 .and. &
1777 1531392 : jneed(j) <= tasks(n)%mlat1+1) then
1778 602004 : njhave = njhave+1
1779 602004 : jhave(njhave) = jneed(j)
1780 : endif
1781 : enddo
1782 1535380 : if (njhave > 0 .and. tasks(n)%magtidi==magtidi) then
1783 14406 : isrc = tasks(n)%magtidj ! task id in cols_comm to recv from
1784 14406 : nlons = mlon11-mlon00+1
1785 14406 : len = nlons*njhave*nf
1786 14406 : msgid = mytid*10000+n
1787 71396136 : rcvbuf = 0._r8
1788 28812 : call mpi_recv(rcvbuf(1:nlons,1:njhave,:),len,MPI_REAL8, &
1789 5097484 : isrc,msgid,cols_comm,irstat,ier)
1790 14406 : if (ier /= 0) &
1791 0 : call handle_mpi_err(ier,'mp_mag_jslot call mpi_recv')
1792 : !
1793 : ! Get data from receive buffer:
1794 : ! real,intent(out) :: fout(mlon00:mlon11,mxneed) ! returned data at needed lats
1795 86436 : do ifld=1,nf
1796 337271 : do j=1,njhave
1797 250835 : nj = ixfind(jneed,mxneed,jhave(j),icount)
1798 250835 : if (nj==0) call endrun('jhave(j) not in jneed')
1799 2519930 : do i=mlon00,mlon11
1800 2447900 : fout(i,nj,ifld) = rcvbuf(i-mlon00+1,j,ifld)
1801 : enddo
1802 : enddo ! j=1,njhave
1803 : enddo ! ifld=1,nf
1804 : endif ! jhave > 0
1805 : enddo ! n=0,ntask-1
1806 : endif ! njneed > 0
1807 :
1808 7296 : end subroutine mp_mag_jslot
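                 : !-----------------------------------------------------------------------
                 : ! Note on the buffered sends above: mpi_ibsend copies each message into
                 : ! the user buffer supplied by MPI_buffer_attach, so the sender can
                 : ! proceed without a matching receive being posted. A minimal sketch of
                 : ! the attach/send/detach life cycle (buf, msg, n and tag are invented
                 : ! for the example; attach sizes are in bytes):
                 : !
                 : !   call MPI_buffer_attach(buf,8*size(buf),ier)  ! 8 bytes per r8 word
                 : !   call mpi_ibsend(msg,n,MPI_REAL8,idest,tag,comm,req,ier)
                 : !   call mpi_wait(req,stat,ier)        ! send is buffered, not yet received
                 : !   call MPI_buffer_detach(bufaddr,nbytes,ier) ! waits for buffered sends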
1809 : !-----------------------------------------------------------------------
1810 68856 : subroutine mp_gatherlons_f3d(f,k0,k1,i0,i1,j0,j1,nflds)
1811 : !
1812             : ! Gather longitude data in a row of tasks to the leftmost task in the row.
1813             : ! On entry, f(n)%ptr(k0:k1,i0:i1,j0:j1) is defined for the current task.
1814             : ! On exit, f(n)%ptr(k0:k1,1:nlon_geo,j0:j1) is defined for the task with
                 : ! mytidi==0.
                 : !
1818 : ! Args:
1819 : !
1820 : integer,intent(in) :: k0,k1,i0,i1,j0,j1,nflds
1821 : ! real(r8),intent(inout) :: f(k0:k1,nlon,j0:j1,nflds)
1822 : type(array_ptr_type) :: f(nflds) ! f(n)%ptr(k0:k1,nlon,j0:j1)
1823 : !
1824 : ! Local:
1825 : !
1826 : integer :: irstat(MPI_STATUS_SIZE) ! mpi receive status
1827 : integer :: j,n,nlons,nlonrecv,nlevs,len,idest,isrc,ier, &
1828 : isend,irecv,itask,lonrecv0,lonrecv1,mtag
1829 : real(r8) :: &
1830 137712 : sndbuf(k0:k1,mxlon,mxlat+4,nflds), & ! send buffer
1831 137712 : rcvbuf(k0:k1,mxlon,mxlat+4,nflds) ! recv buffer
1832 : !
1833 : ! Exec:
1834 : !
1835 68856 : nlons = i1-i0+1
1836 68856 : nlevs = k1-k0+1
1837 :
1838 494673816 : sndbuf = 0._r8
1839 494673816 : rcvbuf = 0._r8
1840 68856 : len = nlevs*mxlon*(mxlat+4)*nflds ! +4 is for when this is called from mp_pole_halos
1841 : !
1842 : ! If mytidi==0, receive from other tasks in my row (mytidi>0,mytidj):
1843 68856 : if (mytidi == 0) then
1844 68856 : do itask=1,ntaski-1
1845 63118 : isrc = itask_table_geo(itask,mytidj)
1846 63118 : mtag = isrc+mytid
1847 63118 : call mpi_irecv(rcvbuf,len,MPI_REAL8,isrc,mtag,mpi_comm_edyn,irecv,ier)
1848 63118 : if (ier /= 0) &
1849 0 : call handle_mpi_err(ier,'mp_gatherlons_f3d recv fm isrc')
1850 63118 : call mpi_wait(irecv,irstat,ier)
1851 63118 : if (ier /= 0) &
1852 0 : call handle_mpi_err(ier,'mp_gatherlons_f3d wait for recv0')
1853 : !
1854 : ! Copy data from receive buffer:
1855 63118 : lonrecv0 = tasks(isrc)%lon0
1856 63118 : lonrecv1 = tasks(isrc)%lon1
1857 63118 : nlonrecv = lonrecv1-lonrecv0+1
1858 146186 : do n=1,nflds
1859 456038 : do j=j0,j1
1860 325856080 : f(n)%ptr(k0:k1,lonrecv0:lonrecv1,j) = rcvbuf(k0:k1,1:nlonrecv,j-j0+1,n)
1861 : enddo ! j=j0,j1
1862 : enddo ! n=1,nflds
1863 : enddo ! itask=1,ntaski-1
1864 : !
1865 : ! If mytidi > 0, load send buffer, and send to task (0,mytidj):
1866 : else ! mytidi /= 0
1867 63118 : idest = itask_table_geo(0,mytidj)
1868 140448 : do n=1,nflds
1869 456038 : do j=j0,j1
1870 325856080 : sndbuf(:,1:nlons,j-j0+1,n) = f(n)%ptr(k0:k1,i0:i1,j)
1871 : enddo ! j=j0,j1
1872 : enddo ! n=1,nflds
1873 63118 : mtag = idest+mytid
1874 63118 : call mpi_isend(sndbuf,len,MPI_REAL8,idest,mtag,mpi_comm_edyn,isend,ier)
1875 63118 : if (ier /= 0) &
1876 0 : call handle_mpi_err(ier,'mp_gatherlons_f3d send0 to idest')
1877 63118 : call mpi_wait(isend,irstat,ier)
1878 63118 : if (ier /= 0) &
1879 0 : call handle_mpi_err(ier,'mp_gatherlons_f3d wait for send0')
1880 : endif ! mytidi==0
1881 68856 : end subroutine mp_gatherlons_f3d
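                 : !-----------------------------------------------------------------------
                 : ! Note: sndbuf/rcvbuf in mp_gatherlons_f3d (and mp_scatterlons_f3d
                 : ! below) are dimensioned with the task maxima mxlon and mxlat+4 rather
                 : ! than the local subdomain sizes, so a single message length (len) is
                 : ! valid for every task in the row; the receiver copies out only the
                 : ! lon0:lon1 range actually owned by each sender.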
1882 : !-----------------------------------------------------------------------
1883 68856 : subroutine mp_scatterlons_f3d(f,k0,k1,i0,i1,j0,j1,nflds)
1884 : !
1885             : ! Redistribute longitudes from the leftmost task in each j-row to the
1886             : ! other tasks in the row.
1887             : ! On input, f(n)%ptr(k0:k1,1:nlon_geo,j0:j1) is defined for tasks with mytidi==0.
1888             : ! On output, f(n)%ptr(k0:k1,i0:i1,j0:j1) is defined for all tasks.
1889 : !
1890 : ! Args:
1891 : !
1892 : integer,intent(in) :: k0,k1,i0,i1,j0,j1,nflds
1893 : type(array_ptr_type) :: f(nflds) ! f(n)%ptr(k0:k1,nlon,j0:j1)
1894 : !
1895 : ! Local:
1896 : !
1897 : integer :: irstat(MPI_STATUS_SIZE) ! mpi receive status
1898 : integer :: j,n,nlevs,nlons,nlonsend,len,idest,isrc,ier, &
1899 : isend,irecv,itask,lonsend0,lonsend1,mtag
1900 : real(r8) :: &
1901 137712 : sndbuf(k0:k1,mxlon,mxlat+4,nflds), & ! send buffer
1902 137712 : rcvbuf(k0:k1,mxlon,mxlat+4,nflds) ! recv buffer
1903 : !
1904 : ! Exec:
1905 : !
1906 68856 : nlons = i1-i0+1
1907 68856 : nlevs = k1-k0+1
1908 :
1909 989347632 : sndbuf = 0._r8 ; rcvbuf = 0._r8
1910 68856 : len = nlevs*mxlon*(mxlat+4)*nflds ! +4 is for when this is called from mp_pole_halos
1911 : !
1912 : ! If mytidi==0, send to other tasks in my row (mytidi>0,mytidj):
1913 68856 : if (mytidi == 0) then
1914 68856 : do itask=1,ntaski-1
1915 63118 : idest = itask_table_geo(itask,mytidj)
1916 63118 : lonsend0 = tasks(idest)%lon0
1917 63118 : lonsend1 = tasks(idest)%lon1
1918 63118 : nlonsend = lonsend1-lonsend0+1
1919 63118 : mtag = idest+mytid
1920 140448 : do n=1,nflds
1921 456038 : do j=j0,j1
1922 325856080 : sndbuf(:,1:nlonsend,j-j0+1,n) = f(n)%ptr(:,lonsend0:lonsend1,j)
1923 : enddo ! j=j0,j1
1924             : enddo ! n=1,nflds
1926 63118 : call mpi_isend(sndbuf,len,MPI_REAL8,idest,mtag,mpi_comm_edyn,isend,ier)
1927 63118 : if (ier /= 0) call handle_mpi_err(ier,'mp_scatterlons_f3d send to idest')
1928 63118 : call mpi_wait(isend,irstat,ier)
1929 68856 : if (ier /= 0) call handle_mpi_err(ier,'mp_scatterlons_f3d wait for send')
1930 : enddo ! itask=1,ntaski-1
1931 : !
1932 : ! If mytidi > 0, receive from task (0,mytidj):
1933 : else
1934 63118 : isrc = itask_table_geo(0,mytidj)
1935 63118 : mtag = isrc+mytid
1936 63118 : call mpi_irecv(rcvbuf,len,MPI_REAL8,isrc,mtag,mpi_comm_edyn,irecv,ier)
1937 63118 : if (ier /= 0) &
1938 0 : call handle_mpi_err(ier,'mp_scatterlons_f3d recv fm isrc')
1939 63118 : call mpi_wait(irecv,irstat,ier)
1940 63118 : if (ier /= 0) &
1941 0 : call handle_mpi_err(ier,'mp_scatterlons_f3d wait for recv')
1942 140448 : do n=1,nflds
1943 456038 : do j=j0,j1
1944 325856080 : f(n)%ptr(:,i0:i1,j) = rcvbuf(:,1:nlons,j-j0+1,n)
1945 : enddo ! j=j0,j1
1946 : enddo ! n=1,nflds
1947 : endif
1948 68856 : end subroutine mp_scatterlons_f3d
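                 : !-----------------------------------------------------------------------
                 : ! Design note on the message tags in the two routines above: both sides
                 : ! compute mtag as the sum of the two task ids (idest+mytid on the
                 : ! sender equals isrc+mytid on the receiver), so each sender/receiver
                 : ! pair agrees on a tag without any extra handshake.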
1949 : !-----------------------------------------------------------------------
1950 0 : subroutine handle_mpi_err(ierrcode,string)
1951 : !
1952 : ! Args:
1953 : integer,intent(in) :: ierrcode
1954 : character(len=*) :: string
1955 : !
1956 : ! Local:
1957 : character(len=80) :: errstring
1958 : integer :: len_errstring, ierr
1959 : !
1960 0 : call mpi_error_string(ierrcode,errstring,len_errstring, ierr)
1961 0 : write(iulog,"(/,'>>> mpi error: ',a)") trim(string)
1962 0 : write(iulog,"(' ierrcode=',i3,': ',a)") trim(errstring)
1963 0 : end subroutine handle_mpi_err
1964 : !-----------------------------------------------------------------------
1965 5493576 : integer function ixfind(iarray,idim,itarget,icount)
1966 : !
1967             : ! Search iarray(idim) for itarget, returning the first index i at which
1968             : ! iarray(i)==itarget (0 if not found). Also return in icount the number
1969             : ! of elements of iarray equal to itarget.
1970 : !
1971 : ! Args:
1972 : integer,intent(in) :: idim,itarget
1973 : integer,intent(in) :: iarray(idim)
1974 : integer,intent(out) :: icount
1975 : !
1976 : ! Local:
1977 : integer :: i
1978 : !
1979 5493576 : ixfind = 0
1980 5493576 : icount = 0
1981 39104627 : if (.not.any(iarray==itarget)) return
1982 549357600 : icount = count(iarray==itarget)
1983 39104627 : do i=1,idim
1984 39104627 : if (iarray(i)==itarget) then
1985 : ixfind = i
1986 : exit
1987 : endif
1988 : enddo
1989 : end function ixfind
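                 : ! Example: with iarray=(/3,5,5,-1/), ixfind(iarray,4,5,icount) returns
                 : ! 2 (the first index holding 5) and sets icount=2; searching for a
                 : ! value not present returns 0 with icount=0.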
1990 :
1991 : !-----------------------------------------------------------------------
1992 401280 : subroutine setpoles(f,k0,k1,i0,i1,j0,j1)
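                 : !
                 : ! Set the pole points of f: average f over all longitudes at the first
                 : ! latitude equatorward of each pole, then assign that average to every
                 : ! longitude at the pole itself. Tasks whose subdomain touches neither
                 : ! pole return immediately.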
1993 : !
1994 : ! Args:
1995 : integer,intent(in) :: k0,k1,i0,i1,j0,j1
1996 : real(r8),intent(inout) :: f(k0:k1,i0:i1,j0:j1)
1997 : !
1998 : ! Local:
1999 : integer :: i,j,k,lon0,lon1,it,itask
2000 : type(array_ptr_type) :: ptr(1)
2001 376200 : real(r8) :: fave(k0:k1)
2002 : real(r8) :: rnlon
2003 :
2004 401280 : if (j0 /= 1 .and. j1 /= nlat_geo) then
2005 : return ! subdomain does not include poles
2006 : end if
2007 :
2008 25080 : rnlon = real(nlon_geo,kind=r8)
2009 125400 : allocate(ptr(1)%ptr(k0:k1,nlon_geo,j0:j1))
2010 : !
2011             : ! Copy the current subdomain into the global-longitude work array ptr(1)%ptr:
2012 : !
2013 100320 : do j=j0,j1
2014 1003200 : do i=i0,i1
2015 42510600 : ptr(1)%ptr(k0:k1,i,j) = f(k0:k1,i,j)
2016 : enddo
2017 : enddo
2018 : !
2019 : ! Get values for global longitudes at the latitude below each pole,
2020 : ! average them at each level, and assign the average redundantly
2021 : ! to all lons at each pole.
2022 : !
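                 : ! At each level k the pole value assigned below is
                 : !   f(k,i,jpole) = (1/nlon_geo) * sum of f(k,i',j_adj) over i'=1..nlon_geo
                 : ! for all i, with j_adj = 2 at the south pole and nlat_geo-1 at the north.
                 : !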
2023 25080 : call mp_gatherlons_f3d(ptr,k0,k1,i0,i1,j0,j1,1)
2024 : !
2025 25080 : if (mytidi==0) then ! only westernmost tasks have global longitudes
2026 :
2027 2090 : if (j0 == 1) then ! subdomain includes south pole
2028 49115 : fave(:) = 0._r8
2029 : !
2030 : ! Find average of all lons at each level, at first lat equatorward of south pole.
2031 : !
2032 49115 : do k=k0,k1
2033 6970150 : do i=1,nlon_geo
2034 6970150 : fave(k) = fave(k)+ptr(1)%ptr(k,i,j0+1)
2035 : enddo
2036 49115 : fave(k) = fave(k) / rnlon
2037 : enddo
2038 : !
2039             : ! Define the south pole in ptr(1)%ptr on the subdomains of each task in
2040             : ! my latitude row (I am the SW corner task):
2041 : !
2042 13585 : do it=0,ntaski-1
2043 12540 : itask = tasks(itask_table_geo(it,mytidj))%mytid
2044 12540 : lon0 = tasks(itask)%lon0
2045 12540 : lon1 = tasks(itask)%lon1
2046 590425 : do k=k0,k1
2047 7511460 : ptr(1)%ptr(k,lon0:lon1,j0) = fave(k) ! all lons get the average
2048 : enddo
2049 : enddo
2050 : endif ! south pole
2051 :
2052 2090 : if (j1 == nlat_geo) then ! subdomain includes north pole
2053 49115 : fave(:) = 0._r8
2054 : !
2055 : ! Find average of all lons at each level, at first lat equatorward of north pole.
2056 : !
2057 49115 : do k=k0,k1
2058 6970150 : do i=1,nlon_geo
2059 6970150 : fave(k) = fave(k)+ptr(1)%ptr(k,i,j1-1)
2060 : enddo
2061 49115 : fave(k) = fave(k) / rnlon
2062 : enddo
2063 : if (debug.and.masterproc) write(iulog,"('setpoles: npole fave(k0:k1)=',/,(8es12.4))") fave
2064 : !
2065             : ! Define the north pole in ptr(1)%ptr on the subdomains of each task in
2066             : ! my latitude row (I am the NW corner task):
2067 : !
2068 13585 : do it=0,ntaski-1
2069 12540 : itask = tasks(itask_table_geo(it,mytidj))%mytid
2070 12540 : lon0 = tasks(itask)%lon0
2071 12540 : lon1 = tasks(itask)%lon1
2072 590425 : do k=k0,k1
2073 7511460 : ptr(1)%ptr(k,lon0:lon1,j1) = fave(k)
2074 : enddo
2075 : enddo
2076 : endif ! north pole
2077             : endif ! mytidi==0
2078 : !
2079 : ! Scatter to tasks in my latitude row:
2080 : !
2081 25080 : call mp_scatterlons_f3d(ptr,k0,k1,i0,i1,j0,j1,1)
2082 : !
2083 : ! Define poles on current subdomain inout arg array:
2084 : !
2085 25080 : if (j0==1) then
2086 163020 : do i=i0,i1
2087 7085100 : do k=k0,k1
2088 7072560 : f(k,i,j0) = ptr(1)%ptr(k,i,j0)
2089 : enddo
2090 : enddo
2091 : endif
2092 25080 : if (j1==nlat_geo) then
2093 163020 : do i=i0,i1
2094 7085100 : do k=k0,k1
2095 7072560 : f(k,i,j1) = ptr(1)%ptr(k,i,j1)
2096 : enddo
2097 : enddo
2098 : endif
2099 25080 : deallocate(ptr(1)%ptr)
2100 : end subroutine setpoles
2101 0 : end module edyn_mpi