module dyn_grid
!-------------------------------------------------------------------------------
!
! Define SE computational grids on the dynamics decomposition.
!
! The grid used by the SE dynamics is called the GLL grid. It is
! decomposed into elements which correspond to "blocks" in the
! physics/dynamics coupler terminology. The columns in this grid are
! located at the Gauss-Lobatto-Legendre (GLL) quadrature points. The GLL
! grid is also used by the physics unless CSLAM advection is used. If
! CSLAM is used for tracer advection, it runs on an FVM grid, and the
! physics uses either the same FVM grid or an FVM grid with a different
! number of equal-area subcells. The FVM grid used by the physics is
! referred to as the "physgrid".
!
! Module responsibilities:
!
! . Provide the physics/dynamics coupler (in module phys_grid) with data for the
!   physics grid on the dynamics decomposition.
!
! . Create CAM grid objects that are used by the I/O functionality to read
!   data from an unstructured grid format to the dynamics data structures, and
!   to write from the dynamics data structures to unstructured grid format. The
!   global column ordering for the unstructured grid is determined by the SE dycore.
!
!-------------------------------------------------------------------------------
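!
! For a concrete sense of scale (illustrative numbers for a common cubed-sphere
! configuration, not anything this module assumes): at ne = 30 the cubed sphere
! has nelem = 6*ne*ne = 5400 elements; with np = 4 the GLL grid has
! 6*ne*ne*(np-1)**2 + 2 = 48602 unique columns; and with fv_nphys = 3 the
! physgrid has nelem * fv_nphys**2 = 48600 columns.
!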

use shr_kind_mod,           only: r8 => shr_kind_r8, shr_kind_cl
use spmd_utils,             only: masterproc, iam, mpicom, mstrid=>masterprocid
use spmd_utils,             only: npes, mpi_integer, mpi_real8
use constituents,           only: pcnst
use physconst,              only: pi
use cam_initfiles,          only: initial_file_get_id
use cam_grid_support,       only: iMap
use physics_column_type,    only: physics_column_t

use cam_logfile,            only: iulog
use cam_abortutils,         only: endrun

use pio,                    only: file_desc_t

use dimensions_mod,         only: globaluniquecols, nelem, nelemd, nelemdmax
use dimensions_mod,         only: ne, np, npsq, fv_nphys, nlev, use_cslam
use element_mod,            only: element_t
use fvm_control_volume_mod, only: fvm_struct
use hybvcoord_mod,          only: hvcoord_t
use prim_init,              only: prim_init1
use edge_mod,               only: initEdgeBuffer
use edgetype_mod,           only: EdgeBuffer_t
use se_dyn_time_mod,        only: TimeLevel_t
use dof_mod,                only: UniqueCoords, UniquePoints

implicit none
private
save

integer, parameter :: dyn_decomp = 101 ! The SE dynamics grid
integer, parameter :: fvm_decomp = 102 ! The FVM (CSLAM) grid
integer, parameter :: physgrid_d = 103 ! physics grid on dynamics decomp
integer, parameter :: ini_decomp = 104 ! alternate dynamics grid for reading initial file
character(len=3), protected :: ini_grid_name

! Name of horizontal grid dimension in initial file.
character(len=6), protected :: ini_grid_hdim_name = ''

integer, parameter :: ptimelevels = 2

type (TimeLevel_t)        :: TimeLevel ! main time level struct (used by tracers)
type (hvcoord_t)          :: hvcoord
type(element_t),  pointer :: elem(:) => null() ! local GLL elements for this task
type(fvm_struct), pointer :: fvm(:) => null()  ! local FVM elements for this task

public :: dyn_decomp
public :: ini_grid_name
public :: ini_grid_hdim_name
public :: ptimelevels
public :: TimeLevel
public :: hvcoord
public :: elem
public :: fvm
public :: edgebuf

public :: dyn_grid_init              ! Initialize the dynamics grid
public :: get_dyn_grid_info          ! Return physics grid column information
public :: physgrid_copy_attributes_d ! Attributes to copy to physics grid

!!XXgoldyXX: v These need to be removed to complete the weak scaling transition.
public :: get_horiz_grid_d
public :: get_horiz_grid_dim_d
public :: get_dyn_grid_parm
public :: get_dyn_grid_parm_real1d
!!XXgoldyXX: ^ These need to be removed to complete the weak scaling transition.
public :: dyn_grid_get_elem_coords   ! get coords of a specified block element

! Note: dyn_grid_get_colndx is not implemented in SE
public :: dyn_grid_get_colndx        ! get element block/column and MPI process indices

! Namelist variables controlling grid writing.
! Read in dyn_readnl from dyn_se_inparm group.
character(len=16),          public :: se_write_grid_file   = 'no'
character(len=shr_kind_cl), public :: se_grid_filename     = ''
logical,                    public :: se_write_gll_corners = .false.
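
! A minimal sketch of the corresponding dyn_se_inparm namelist entries
! (the values shown are purely illustrative, not defaults):
!
!   &dyn_se_inparm
!     se_write_grid_file   = 'SCRIP'     ! any value other than 'no' triggers writing
!     se_grid_filename     = 'se_grid.nc'
!     se_write_gll_corners = .true.
!   /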

type(physics_column_t), allocatable, target :: local_dyn_columns(:)

! number of global dynamics columns. Set by SE dycore init.
integer :: ngcols_d = 0
! number of global elements. Set by SE dycore init.
integer :: nelem_d = 0

real(r8), parameter :: rad2deg = 180.0_r8/pi

type(EdgeBuffer_t) :: edgebuf

!=============================================================================
contains
!=============================================================================

subroutine dyn_grid_init()

   ! Initialize the SE grid and decomposition.

   use hycoef,           only: hycoef_init, hypi, hypm, nprlev, &
                               hyam, hybm, hyai, hybi, ps0
   use ref_pres,         only: ref_pres_init
   use spmd_utils,       only: MPI_MAX, MPI_INTEGER, mpicom
   use time_manager,     only: get_nstep, get_step_size
   use dp_mapping,       only: dp_init, dp_write
   use native_mapping,   only: do_native_mapping, create_native_mapping_files

   use parallel_mod,     only: par
   use hybrid_mod,       only: hybrid_t, init_loop_ranges, &
                               get_loop_ranges, config_thread_region
   use control_mod,      only: qsplit, rsplit
   use se_dyn_time_mod,  only: tstep, nsplit
   use fvm_mod,          only: fvm_init2, fvm_init3, fvm_pg_init
   use dimensions_mod,   only: irecons_tracer
   use comp_gll_ctr_vol, only: gll_grid_write
   use air_composition,  only: thermodynamic_active_species_num

   ! Local variables

   type(file_desc_t), pointer :: fh_ini

   integer :: qsize_local
   integer :: k

   type(hybrid_t) :: hybrid
   integer :: ierr
   integer :: dtime

   real(r8), allocatable :: clat(:), clon(:), areaa(:)
   integer :: nets, nete

   character(len=*), parameter :: sub = 'dyn_grid_init'
   !----------------------------------------------------------------------------

   ! Get file handle for the initial file and do a first consistency check
   fh_ini => initial_file_get_id()

   ! Initialize hybrid coordinate arrays
   call hycoef_init(fh_ini, psdry=.true.)

   hvcoord%hyam = hyam
   hvcoord%hyai = hyai
   hvcoord%hybm = hybm
   hvcoord%hybi = hybi
   hvcoord%ps0  = ps0
   do k = 1, nlev
      hvcoord%hybd(k) = hvcoord%hybi(k+1) - hvcoord%hybi(k)
   end do

   ! Initialize reference pressures
   call ref_pres_init(hypi, hypm, nprlev)

   if (iam < par%nprocs) then

      call prim_init1(elem, fvm, par, TimeLevel)
      if (use_cslam) then
         call dp_init(elem, fvm)
      end if

      if (fv_nphys > 0) then
         qsize_local = 3
      else
         qsize_local = pcnst + 3
      end if

      call initEdgeBuffer(par, edgebuf, elem, qsize_local*nlev, nthreads=1)

   else ! auxiliary processes

      globaluniquecols = 0
      nelem            = 0
      nelemd           = 0
      nelemdmax        = 0
   end if

   ! nelemdmax is computed on the dycore comm; we need it globally.
   ngcols_d = nelemdmax
   call MPI_Allreduce(ngcols_d, nelemdmax, 1, MPI_INTEGER, MPI_MAX, mpicom, ierr)
   ! All pes might not have the correct global grid size
   call MPI_Allreduce(globaluniquecols, ngcols_d, 1, MPI_INTEGER, MPI_MAX, mpicom, ierr)
   ! All pes might not have the correct number of elements
   call MPI_Allreduce(nelem, nelem_d, 1, MPI_INTEGER, MPI_MAX, mpicom, ierr)

   ! nelemd (# of elements on this task) is set by prim_init1
   call init_loop_ranges(nelemd)

   ! Dynamics timestep
   !
   ! Note: dtime = timestep for physics/dynamics coupling
   !       tstep = the dynamics timestep:
   dtime = get_step_size()
   tstep = dtime / real(nsplit*qsplit*rsplit, r8)
   TimeLevel%nstep = get_nstep()*nsplit*qsplit*rsplit
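
   ! Worked example (hypothetical namelist values, for illustration only):
   ! with dtime = 1800 s, nsplit = 2, qsplit = 1, rsplit = 3 the dynamics
   ! substep is tstep = 1800/(2*1*3) = 300 s, and nstep advances by
   ! nsplit*qsplit*rsplit = 6 dynamics substeps per coupling step.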

   ! initial SE (subcycled) nstep
   TimeLevel%nstep0 = 0

   ! determine whether initial file uses 'ncol' or 'ncol_d'
   call get_hdim_name(fh_ini, ini_grid_hdim_name)

   ! Define the dynamics and physics grids on the dynamics decomposition.
   ! Physics grid on the physics decomposition is defined in phys_grid_init.
   call define_cam_grids()

   if (fv_nphys > 0) then

      ! ================================================
      ! finish fvm initialization
      ! ================================================

      if (iam < par%nprocs) then
         hybrid = config_thread_region(par, 'serial')
         call get_loop_ranges(hybrid, ibeg=nets, iend=nete)

         ! initialize halo coordinate variables for cslam and physgrid
         call fvm_init2(elem, fvm, hybrid, nets, nete)
         call fvm_pg_init(elem, fvm, hybrid, nets, nete, irecons_tracer)
         call fvm_init3(elem, fvm, hybrid, nets, nete, irecons_tracer)
      end if

   end if

   ! write grid and mapping files
   if (se_write_gll_corners) then
      call write_grid_mapping(par, elem)
   end if

   if (trim(se_write_grid_file) /= "no") then
      if (fv_nphys > 0) then
         call dp_write(elem, fvm, trim(se_write_grid_file), trim(se_grid_filename))
      else
         call gll_grid_write(elem, trim(se_write_grid_file), trim(se_grid_filename))
      end if
   end if

   if (do_native_mapping) then

      allocate(areaA(ngcols_d))
      allocate(clat(ngcols_d), clon(ngcols_d))
      call get_horiz_grid_int(ngcols_d, clat_d_out=clat, clon_d_out=clon, area_d_out=areaA)

      ! Create mapping files using SE basis functions
      call create_native_mapping_files(par, elem, 'native', ngcols_d, clat, clon, areaa)
      call create_native_mapping_files(par, elem, 'bilin', ngcols_d, clat, clon, areaa)

      deallocate(areaa, clat, clon)
   end if

   call mpi_barrier(mpicom, ierr)

end subroutine dyn_grid_init

!==============================================================================

subroutine get_dyn_grid_info(hdim1_d, hdim2_d, num_lev, &
     index_model_top_layer, index_surface_layer, unstructured, dyn_columns)
   !------------------------------------------------------------
   !
   ! get_dyn_grid_info returns physics grid column information
   !
   !------------------------------------------------------------
   use shr_const_mod,          only: SHR_CONST_PI
   use cam_abortutils,         only: endrun
   use spmd_utils,             only: iam
   use coordinate_systems_mod, only: spherical_polar_t
   ! Dummy arguments
   integer, intent(out) :: hdim1_d ! # longitudes or grid size
   integer, intent(out) :: hdim2_d ! # latitudes or 1
   integer, intent(out) :: num_lev ! # levels
   integer, intent(out) :: index_model_top_layer
   integer, intent(out) :: index_surface_layer
   logical, intent(out) :: unstructured
   ! dyn_columns will contain a copy of the physics column info local to this
   ! dynamics task
   type(physics_column_t), allocatable, intent(out) :: dyn_columns(:)
   ! Local variables
   integer                     :: lindex
   integer                     :: gindex
   integer                     :: elem_ind, col_ind, ii, jj
   integer                     :: num_local_cols
   type(spherical_polar_t)     :: coord
   real(r8)                    :: dcoord
   real(r8),         parameter :: radtodeg = 180.0_r8 / SHR_CONST_PI
   real(r8),         parameter :: degtorad = SHR_CONST_PI / 180.0_r8
   character(len=*), parameter :: subname = 'get_dyn_grid_info'

   unstructured = .true. ! SE is an unstructured dycore
   if (fv_nphys > 0) then ! physics uses an FVM grid
      num_local_cols = nelemd * fv_nphys * fv_nphys
   else
      num_local_cols = 0
      do elem_ind = 1, nelemd
         num_local_cols = num_local_cols + elem(elem_ind)%idxP%NumUniquePts
      end do
   end if
   if (allocated(local_dyn_columns)) then
      ! Check for correct number of columns
      if (size(local_dyn_columns) /= num_local_cols) then
         call endrun(subname//': called with inconsistent column numbers')
      end if
   else
      allocate(local_dyn_columns(num_local_cols))
      if (fv_nphys > 0) then ! physics uses an FVM grid
         hdim1_d = nelem * fv_nphys * fv_nphys
      else
         hdim1_d = ngcols_d
      end if
      hdim2_d = 1
      num_lev = nlev
      index_model_top_layer = 1
      index_surface_layer   = nlev
      lindex = 0
      do elem_ind = 1, nelemd
         if (fv_nphys > 0) then ! physics uses an FVM grid
            do col_ind = 0, (fv_nphys * fv_nphys) - 1
               lindex = lindex + 1
               ii = MOD(col_ind, fv_nphys) + 1
               jj = (col_ind / fv_nphys) + 1
               coord = fvm(elem_ind)%center_cart_physgrid(ii, jj)
               local_dyn_columns(lindex)%lat_rad = coord%lat
               dcoord = local_dyn_columns(lindex)%lat_rad * radtodeg
               local_dyn_columns(lindex)%lat_deg = dcoord
               local_dyn_columns(lindex)%lon_rad = coord%lon
               dcoord = local_dyn_columns(lindex)%lon_rad * radtodeg
               local_dyn_columns(lindex)%lon_deg = dcoord
               local_dyn_columns(lindex)%area = &
                    fvm(elem_ind)%area_sphere_physgrid(ii,jj)
               local_dyn_columns(lindex)%weight = &
                    local_dyn_columns(lindex)%area
               ! File decomposition
               gindex = ((elem(elem_ind)%GlobalId-1) * fv_nphys * fv_nphys) + &
                    col_ind + 1
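               ! e.g. (illustrative values): with fv_nphys = 3 and
               ! GlobalId = 2, col_ind = 0..8 gives gindex = 10..18, i.e.
               ! global column numbers are contiguous within each element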
               local_dyn_columns(lindex)%global_col_num = gindex
               ! Note, coord_indices not used for unstructured dycores
               ! Dynamics decomposition
               local_dyn_columns(lindex)%dyn_task = iam
               local_dyn_columns(lindex)%local_dyn_block = elem_ind
               local_dyn_columns(lindex)%global_dyn_block = &
                    elem(elem_ind)%GlobalId
               allocate(local_dyn_columns(lindex)%dyn_block_index(1))
               local_dyn_columns(lindex)%dyn_block_index(1) = col_ind + 1
            end do
         else
            do col_ind = 1, elem(elem_ind)%idxP%NumUniquePts
               lindex = lindex + 1
               ii = elem(elem_ind)%idxP%ia(col_ind)
               jj = elem(elem_ind)%idxP%ja(col_ind)

               dcoord = elem(elem_ind)%spherep(ii,jj)%lat
               local_dyn_columns(lindex)%lat_rad = dcoord
               dcoord = local_dyn_columns(lindex)%lat_rad * radtodeg
               local_dyn_columns(lindex)%lat_deg = dcoord
               dcoord = elem(elem_ind)%spherep(ii,jj)%lon
               local_dyn_columns(lindex)%lon_rad = dcoord
               dcoord = local_dyn_columns(lindex)%lon_rad * radtodeg
               local_dyn_columns(lindex)%lon_deg = dcoord
               local_dyn_columns(lindex)%area = &
                    1.0_r8 / elem(elem_ind)%rspheremp(ii,jj)
               local_dyn_columns(lindex)%weight = local_dyn_columns(lindex)%area
               ! File decomposition
               gindex = elem(elem_ind)%idxP%UniquePtoffset + col_ind - 1
               local_dyn_columns(lindex)%global_col_num = gindex
               ! Note, coord_indices not used for unstructured dycores
               ! Dynamics decomposition
               local_dyn_columns(lindex)%dyn_task = iam
               local_dyn_columns(lindex)%local_dyn_block = elem_ind
               local_dyn_columns(lindex)%global_dyn_block = &
                    elem(elem_ind)%GlobalId
               allocate(local_dyn_columns(lindex)%dyn_block_index(1))
               local_dyn_columns(lindex)%dyn_block_index(1) = col_ind
            end do
         end if
      end do
   end if
   ! Copy the information to the output array
   if (allocated(dyn_columns)) then
      deallocate(dyn_columns)
   end if
   ! Size with num_local_cols: lindex is only defined on the first call,
   ! when local_dyn_columns is filled above.
   allocate(dyn_columns(num_local_cols))
   do lindex = 1, num_local_cols
      dyn_columns(lindex) = local_dyn_columns(lindex)
   end do

end subroutine get_dyn_grid_info

!==============================================================================

subroutine get_horiz_grid_dim_d(hdim1_d, hdim2_d)

   ! Returns declared horizontal dimensions of computational grid.
   ! For non-lon/lat grids, declare grid to be one-dimensional,
   ! i.e., (ngcols_d x 1)

   !------------------------------Arguments--------------------------------
   integer, intent(out)           :: hdim1_d ! first horizontal dimension
   integer, intent(out), optional :: hdim2_d ! second horizontal dimension
   !-----------------------------------------------------------------------

   if (fv_nphys > 0) then
      hdim1_d = fv_nphys*fv_nphys*nelem_d
   else
      hdim1_d = ngcols_d
   end if
   if (present(hdim2_d)) then
      hdim2_d = 1
   end if

end subroutine get_horiz_grid_dim_d

!=========================================================================================

subroutine get_horiz_grid_int(nxy, clat_d_out, clon_d_out, area_d_out, &
     wght_d_out, lat_d_out, lon_d_out)

   ! Return global arrays of latitude and longitude (in radians), column
   ! surface area (in radians squared) and surface integration weights for
   ! global column indices that will be passed to/from physics

   ! arguments
   integer, intent(in) :: nxy ! array sizes

   real(r8), intent(out),         optional :: clat_d_out(:) ! column latitudes
   real(r8), intent(out),         optional :: clon_d_out(:) ! column longitudes
   real(r8), intent(out), target, optional :: area_d_out(:) ! column surface areas

   real(r8), intent(out), target, optional :: wght_d_out(:) ! column integration weight
   real(r8), intent(out),         optional :: lat_d_out(:)  ! column degree latitudes
   real(r8), intent(out),         optional :: lon_d_out(:)  ! column degree longitudes

   ! local variables
   real(r8), pointer :: area_d(:)
   real(r8), pointer :: temp(:)
   character(len=SHR_KIND_CL)  :: errormsg
   character(len=*), parameter :: sub = 'get_horiz_grid_int'
   !----------------------------------------------------------------------------

   ! check that nxy is set to correct size for global arrays
   if (fv_nphys > 0) then
      if (nxy < fv_nphys*fv_nphys*nelem_d) then
         write(errormsg, *) sub//': arrays too small; Passed', &
              nxy, ', needs to be at least', fv_nphys*fv_nphys*nelem_d
         call endrun(errormsg)
      end if
   else
      if (nxy < ngcols_d) then
         write(errormsg,*) sub//': arrays not large enough; ', &
              'Passed', nxy, ', needs to be at least', ngcols_d
         call endrun(errormsg)
      end if
   end if

   if ( present(area_d_out) ) then
      if (size(area_d_out) /= nxy) then
         call endrun(sub//': bad area_d_out array size')
      end if
      area_d => area_d_out
      call create_global_area(area_d)

   else if ( present(wght_d_out) ) then
      if (size(wght_d_out) /= nxy) then
         call endrun(sub//': bad wght_d_out array size')
      end if
      area_d => wght_d_out
      call create_global_area(area_d)

   end if

   ! If one of area_d_out or wght_d_out was present, then it was computed
   ! above. If they were *both* present, then do this:
   if ( present(area_d_out) .and. present(wght_d_out) ) then
      wght_d_out(:) = area_d_out(:)
   end if

   if (present(clon_d_out)) then
      if (size(clon_d_out) /= nxy) then
         call endrun(sub//': bad clon_d_out array size in dyn_grid')
      end if
   end if

   if (present(clat_d_out)) then

      if (size(clat_d_out) /= nxy) then
         call endrun(sub//': bad clat_d_out array size in dyn_grid')
      end if

      if (present(clon_d_out)) then
         call create_global_coords(clat_d_out, clon_d_out, lat_d_out, lon_d_out)
      else
         allocate(temp(nxy))
         call create_global_coords(clat_d_out, temp, lat_d_out, lon_d_out)
         deallocate(temp)
      end if

   else if (present(clon_d_out)) then

      allocate(temp(nxy))
      call create_global_coords(temp, clon_d_out, lat_d_out, lon_d_out)
      deallocate(temp)

   end if

end subroutine get_horiz_grid_int

!=========================================================================================

subroutine physgrid_copy_attributes_d(gridname, grid_attribute_names)

   ! create list of attributes for the physics grid that should be copied
   ! from the corresponding grid object on the dynamics decomposition

   use cam_grid_support, only: max_hcoordname_len

   ! Dummy arguments
   character(len=max_hcoordname_len),          intent(out) :: gridname
   character(len=max_hcoordname_len), pointer, intent(out) :: grid_attribute_names(:)

   if (fv_nphys > 0) then
      gridname = 'physgrid_d'
      allocate(grid_attribute_names(2))
      grid_attribute_names(1) = 'fv_nphys'
      grid_attribute_names(2) = 'ne'
   else
      gridname = 'GLL'
      allocate(grid_attribute_names(2))
      grid_attribute_names(1) = 'np'
      grid_attribute_names(2) = 'ne'
   end if

end subroutine physgrid_copy_attributes_d

!=========================================================================================

integer function get_dyn_grid_parm(name) result(ival)

   ! This function is in the process of being deprecated, but is still needed
   ! as a dummy interface to satisfy external references from some chemistry routines.

   use pmgrid, only: plat, plev

   character(len=*), intent(in) :: name
   !----------------------------------------------------------------------------

   if (name == 'plat') then
      ival = plat
   else if (name == 'plon') then
      if (fv_nphys > 0) then
         ival = fv_nphys*fv_nphys*nelem_d
      else
         ival = ngcols_d
      end if
   else if (name == 'plev') then
      ival = plev
   else
      ival = -1
   end if

end function get_dyn_grid_parm

!=========================================================================================

subroutine dyn_grid_get_colndx(igcol, ncols, owners, col, lbk)

   ! For each global column index return the owning task. If the column is owned
   ! by this task, then also return the local block number and column index in that
   ! block.
   !
   ! NOTE: this routine needs to be updated for the physgrid

   integer, intent(in)  :: ncols
   integer, intent(in)  :: igcol(ncols)
   integer, intent(out) :: owners(ncols)
   integer, intent(out) :: col(ncols)
   integer, intent(out) :: lbk(ncols)

   !----------------------------------------------------------------------------

   owners = (igcol * 0) - 1 ! Kill compiler warnings
   col    = -1              ! Kill compiler warnings
   lbk    = -1              ! Kill compiler warnings
   call endrun('dyn_grid_get_colndx: not implemented for unstructured grids')

end subroutine dyn_grid_get_colndx

!=========================================================================================

subroutine dyn_grid_get_elem_coords(ie, rlon, rlat, cdex)

   ! Returns coordinates of a specified block element of the dyn grid
   !
   ! NB: This routine only uses the GLL points (i.e., it ignores the physics
   !     grid). This is probably OK as current use is only for dyn_decomp
   !     variables in history.

   integer, intent(in) :: ie ! block element index

   real(r8), optional, intent(out) :: rlon(:) ! longitudes of the columns in the element
   real(r8), optional, intent(out) :: rlat(:) ! latitudes of the columns in the element
   integer,  optional, intent(out) :: cdex(:) ! global column index

   integer :: sb, eb, ii, i, j, icol, igcol
   real(r8), allocatable :: clat(:), clon(:)
   !----------------------------------------------------------------------------

   if (fv_nphys > 0) then
      call endrun('dyn_grid_get_elem_coords: not implemented for the FVM physics grid')
   end if

   sb = elem(ie)%idxp%UniquePtOffset
   eb = sb + elem(ie)%idxp%NumUniquePts - 1

   allocate( clat(sb:eb), clon(sb:eb) )
   call UniqueCoords( elem(ie)%idxP, elem(ie)%spherep, clat(sb:eb), clon(sb:eb) )

   if (present(cdex)) cdex(:) = -1
   if (present(rlat)) rlat(:) = -999._r8
   if (present(rlon)) rlon(:) = -999._r8

   do ii = 1, elem(ie)%idxp%NumUniquePts
      i = elem(ie)%idxp%ia(ii)
      j = elem(ie)%idxp%ja(ii)
      icol  = i + (j-1)*np
      igcol = elem(ie)%idxp%UniquePtoffset + ii - 1
      if (present(cdex)) cdex(icol) = igcol
      if (present(rlat)) rlat(icol) = clat( igcol )
      if (present(rlon)) rlon(icol) = clon( igcol )
   end do

   deallocate( clat, clon )

end subroutine dyn_grid_get_elem_coords

!=========================================================================================
! Private routines.
!=========================================================================================

subroutine get_hdim_name(fh_ptr, grid_hdim_name)
   use pio, only: pio_inq_dimid, pio_seterrorhandling
   use pio, only: PIO_BCAST_ERROR, PIO_NOERR

   ! Determine whether the supplied file uses 'ncol' or 'ncol_d' as the
   ! horizontal dimension of the unstructured grid. It is also possible,
   ! when using analytic initial conditions, that the file only contains
   ! vertical coordinates; return 'none' in that case.

   ! Arguments
   type(file_desc_t), pointer    :: fh_ptr
   character(len=6), intent(out) :: grid_hdim_name ! horizontal dimension name

   ! local variables
   integer :: ierr, pio_errtype
   integer :: ncol_did

   character(len=*), parameter :: sub = 'get_hdim_name'
   !----------------------------------------------------------------------------

   ! Set PIO to return error flags.
   call pio_seterrorhandling(fh_ptr, PIO_BCAST_ERROR, pio_errtype)

   ! Check for ncol_d first just in case the file also contains fields on
   ! the physics grid.
   ierr = pio_inq_dimid(fh_ptr, 'ncol_d', ncol_did)
   if (ierr == PIO_NOERR) then

      grid_hdim_name = 'ncol_d'

   else

      ! if 'ncol_d' not in file, check for 'ncol'
      ierr = pio_inq_dimid(fh_ptr, 'ncol', ncol_did)

      if (ierr == PIO_NOERR) then
         grid_hdim_name = 'ncol'
      else
         grid_hdim_name = 'none'
      end if
   end if

   ! Return PIO to previous error handling.
   call pio_seterrorhandling(fh_ptr, pio_errtype)

end subroutine get_hdim_name

subroutine define_cam_grids()

   ! Create grid objects on the dynamics decomposition for grids used by
   ! the dycore. The decomposed grid object contains data for the elements
   ! in each task and information to map that data to the global grid.
   !
   ! Notes on dynamic memory management:
   !
   ! . Coordinate values and the map passed to the horiz_coord_create
   !   method are copied to the object. The memory may be deallocated
   !   after the object is created.
   !
   ! . The area values passed to cam_grid_attribute_register are only pointed
   !   to by the attribute object, so that memory cannot be deallocated. But the
   !   map is copied.
   !
   ! . The grid_map passed to cam_grid_register is just pointed to.
   !   Cannot be deallocated.
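   !
   ! In practice (see the cleanup at the end of this routine) this means the
   ! locally allocated pelat_deg, pelon_deg and pemap may be deallocated once
   ! the coordinate objects exist, while pearea, pearea_wt and grid_map may
   ! only be nullified because the grid objects keep the references.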

   use cam_grid_support, only: horiz_coord_t, horiz_coord_create
   use cam_grid_support, only: cam_grid_register, cam_grid_attribute_register
   use dimensions_mod,   only: nc
   use shr_const_mod,    only: PI => SHR_CONST_PI

   ! Local variables
   integer          :: i, ii, j, k, ie, mapind
   character(len=8) :: latname, lonname, ncolname, areaname

   type(horiz_coord_t), pointer :: lat_coord
   type(horiz_coord_t), pointer :: lon_coord
   integer(iMap),       pointer :: grid_map(:,:)

   real(r8),      allocatable :: pelat_deg(:) ! pe-local latitudes (degrees)
   real(r8),      allocatable :: pelon_deg(:) ! pe-local longitudes (degrees)
   real(r8),      pointer     :: pearea(:)    ! pe-local areas
   real(r8),      pointer     :: pearea_wt(:) ! pe-local areas normalized for unit sphere
   integer(iMap)              :: fdofP_local(npsq,nelemd) ! pe-local map for dynamics decomp
   integer(iMap), allocatable :: pemap(:)     ! pe-local map for PIO decomp

   integer                    :: ncols_fvm, ngcols_fvm
   real(r8),      allocatable :: fvm_coord(:)
   real(r8),      pointer     :: fvm_area(:)
   real(r8),      pointer     :: fvm_areawt(:)
   integer(iMap), pointer     :: fvm_map(:)

   integer                    :: ncols_physgrid, ngcols_physgrid
   real(r8),      allocatable :: physgrid_coord(:)
   real(r8),      pointer     :: physgrid_area(:)
   real(r8),      pointer     :: physgrid_areawt(:)
   integer(iMap), pointer     :: physgrid_map(:)

   real(r8), parameter :: rarea_unit_sphere = 1.0_r8 / (4.0_r8*PI)

   !----------------------------------------------------------------------------

   !-----------------------
   ! initialize pointers to null
   !-----------------------
   nullify(pearea_wt)
   nullify(pearea)
   nullify(fvm_area)
   nullify(fvm_areawt)
   nullify(fvm_map)
   nullify(physgrid_area)
   nullify(physgrid_areawt)
   nullify(physgrid_map)
   nullify(grid_map)

   !-----------------------
   ! Create GLL grid object
   !-----------------------

   ! Calculate the mapping between element GLL points and file order
   fdofp_local = 0_iMap
   do ie = 1, nelemd
      do ii = 1, elem(ie)%idxP%NumUniquePts
         i = elem(ie)%idxP%ia(ii)
         j = elem(ie)%idxP%ja(ii)
         fdofp_local((np*(j-1))+i, ie) = elem(ie)%idxP%UniquePtoffset + ii - 1
      end do
   end do
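   ! The np*np GLL points of each element are stored in row-major order, so
   ! (for example, with np = 4) the unique point at i = 2, j = 3 lands in
   ! slot (4*(3-1)) + 2 = 10 of that element's column of fdofp_local.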

   allocate(pelat_deg(np*np*nelemd))
   allocate(pelon_deg(np*np*nelemd))
   allocate(pearea(np*np*nelemd))
   allocate(pearea_wt(np*np*nelemd))
   allocate(pemap(np*np*nelemd))

   pemap = 0_iMap
   ii = 1
   do ie = 1, nelemd
      pemap(ii:ii+npsq-1) = fdofp_local(:,ie)
      do j = 1, np
         do i = 1, np
            pearea(ii)    = elem(ie)%mp(i,j)*elem(ie)%metdet(i,j)
            pearea_wt(ii) = pearea(ii)*rarea_unit_sphere
            pelat_deg(ii) = elem(ie)%spherep(i,j)%lat * rad2deg
            pelon_deg(ii) = elem(ie)%spherep(i,j)%lon * rad2deg
            ii = ii + 1
         end do
      end do
   end do

   ! If using the physics grid then the GLL grid will use the names with
   ! '_d' suffixes and the physics grid will use the unadorned names.
   ! This allows fields on both the GLL and physics grids to be written to history
   ! output files.
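   ! For example, in a physgrid run the GLL latitudes appear in history files
   ! as 'lat_d' on dimension 'ncol_d', while the physics grid supplies 'lat'
   ! on 'ncol'.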
   if (trim(ini_grid_hdim_name) == 'ncol_d') then
      latname  = 'lat_d'
      lonname  = 'lon_d'
      ncolname = 'ncol_d'
      areaname = 'area_d'
   else
      latname  = 'lat'
      lonname  = 'lon'
      ncolname = 'ncol'
      areaname = 'area'
   end if
   lat_coord => horiz_coord_create('lat_d', 'ncol_d', ngcols_d, &
        'latitude', 'degrees_north', 1, size(pelat_deg), pelat_deg, map=pemap)
   lon_coord => horiz_coord_create('lon_d', 'ncol_d', ngcols_d, &
        'longitude', 'degrees_east', 1, size(pelon_deg), pelon_deg, map=pemap)

   ! Map for GLL grid
   allocate(grid_map(3, npsq*nelemd))
   grid_map = 0_iMap
   mapind = 1
   do j = 1, nelemd
      do i = 1, npsq
         grid_map(1, mapind) = i
         grid_map(2, mapind) = j
         grid_map(3, mapind) = pemap(mapind)
         mapind = mapind + 1
      end do
   end do

   ! The native SE GLL grid
   call cam_grid_register('GLL', dyn_decomp, lat_coord, lon_coord, &
        grid_map, block_indexed=.false., unstruct=.true.)
   call cam_grid_attribute_register('GLL', 'area_d', 'gll grid areas', &
        'ncol_d', pearea, map=pemap)
   call cam_grid_attribute_register('GLL', 'area_weight_gll', 'gll grid area weights', &
        'ncol_d', pearea_wt, map=pemap)
   call cam_grid_attribute_register('GLL', 'np', '', np)
   call cam_grid_attribute_register('GLL', 'ne', '', ne)

   ! If the dim name is 'ncol', create the INI grid.
   ! We will read from the INI grid, but use the GLL grid for all output.
   if (trim(ini_grid_hdim_name) == 'ncol') then

      lat_coord => horiz_coord_create('lat', 'ncol', ngcols_d, &
           'latitude', 'degrees_north', 1, size(pelat_deg), pelat_deg, map=pemap)
      lon_coord => horiz_coord_create('lon', 'ncol', ngcols_d, &
           'longitude', 'degrees_east', 1, size(pelon_deg), pelon_deg, map=pemap)

      call cam_grid_register('INI', ini_decomp, lat_coord, lon_coord, &
           grid_map, block_indexed=.false., unstruct=.true.)
      call cam_grid_attribute_register('INI', 'area', 'ini grid areas', &
           'ncol', pearea, map=pemap)
      call cam_grid_attribute_register('INI', 'area_weight_ini', 'ini grid area weights', &
           'ncol', pearea_wt, map=pemap)

      ini_grid_name = 'INI'
   else
      ! The dyn_decomp grid can be used to read the initial file.
      ini_grid_name = 'GLL'
   end if

   ! Coordinate values and maps are copied into the coordinate and attribute objects.
   ! Locally allocated storage is no longer needed.
   deallocate(pelat_deg)
   deallocate(pelon_deg)
   deallocate(pemap)

   ! pearea cannot be deallocated as the attribute object is just pointing
   ! to that memory. It can be nullified since the attribute object has
   ! the reference.
   nullify(pearea)
   nullify(pearea_wt)

   ! grid_map cannot be deallocated as the cam_filemap_t object just points
   ! to it. It can be nullified.
   nullify(grid_map)

   !---------------------------------
   ! Create FVM grid object for CSLAM
   !---------------------------------

   if (use_cslam) then

      ncols_fvm  = nc * nc * nelemd
      ngcols_fvm = nc * nc * nelem_d
      allocate(fvm_coord(ncols_fvm))
      allocate(fvm_map(ncols_fvm))
      allocate(fvm_area(ncols_fvm))
      allocate(fvm_areawt(ncols_fvm))

      do ie = 1, nelemd
         k = 1
         do j = 1, nc
            do i = 1, nc
               mapind = k + ((ie - 1) * nc * nc)
               fvm_coord(mapind)  = fvm(ie)%center_cart(i,j)%lon*rad2deg
               fvm_map(mapind)    = k + ((elem(ie)%GlobalId-1) * nc * nc)
               fvm_area(mapind)   = fvm(ie)%area_sphere(i,j)
               fvm_areawt(mapind) = fvm_area(mapind)*rarea_unit_sphere
               k = k + 1
            end do
         end do
      end do
      lon_coord => horiz_coord_create('lon_fvm', 'ncol_fvm', ngcols_fvm, &
           'longitude', 'degrees_east', 1, size(fvm_coord), fvm_coord, &
           map=fvm_map)

      do ie = 1, nelemd
         k = 1
         do j = 1, nc
            do i = 1, nc
               mapind = k + ((ie - 1) * nc * nc)
               fvm_coord(mapind) = fvm(ie)%center_cart(i,j)%lat*rad2deg
               k = k + 1
            end do
         end do
      end do
      lat_coord => horiz_coord_create('lat_fvm', 'ncol_fvm', ngcols_fvm, &
           'latitude', 'degrees_north', 1, size(fvm_coord), fvm_coord, &
           map=fvm_map)

      ! Map for FVM grid
      allocate(grid_map(3, ncols_fvm))
      grid_map = 0_iMap
      mapind = 1
      do j = 1, nelemd
         do i = 1, nc*nc
            grid_map(1, mapind) = i
            grid_map(2, mapind) = j
            grid_map(3, mapind) = fvm_map(mapind)
            mapind = mapind + 1
         end do
      end do

      ! create FVM (CSLAM) grid object
      call cam_grid_register('FVM', fvm_decomp, lat_coord, lon_coord, &
           grid_map, block_indexed=.false., unstruct=.true.)
      call cam_grid_attribute_register('FVM', 'area_fvm', 'fvm grid areas', &
           'ncol_fvm', fvm_area, map=fvm_map)
      call cam_grid_attribute_register('FVM', 'area_weight_fvm', 'fvm grid area weights', &
           'ncol_fvm', fvm_areawt, map=fvm_map)
      call cam_grid_attribute_register('FVM', 'nc', '', nc)
      call cam_grid_attribute_register('FVM', 'ne', '', ne)

      deallocate(fvm_coord)
      deallocate(fvm_map)
      nullify(fvm_area)
      nullify(fvm_areawt)
      nullify(grid_map)

   end if

   !------------------------------------------------------------------
   ! Create grid object for physics grid on the dynamics decomposition
   !------------------------------------------------------------------

   if (fv_nphys > 0) then

      ncols_physgrid  = fv_nphys * fv_nphys * nelemd
      ngcols_physgrid = fv_nphys * fv_nphys * nelem_d
      allocate(physgrid_coord(ncols_physgrid))
      allocate(physgrid_map(ncols_physgrid))
      allocate(physgrid_area(ncols_physgrid))
      allocate(physgrid_areawt(ncols_physgrid))

      do ie = 1, nelemd
         k = 1
         do j = 1, fv_nphys
            do i = 1, fv_nphys
               mapind = k + ((ie - 1) * fv_nphys * fv_nphys)
               physgrid_coord(mapind)  = fvm(ie)%center_cart_physgrid(i,j)%lon*rad2deg
               physgrid_map(mapind)    = k + ((elem(ie)%GlobalId-1) * fv_nphys * fv_nphys)
               physgrid_area(mapind)   = fvm(ie)%area_sphere_physgrid(i,j)
               physgrid_areawt(mapind) = physgrid_area(mapind)*rarea_unit_sphere
               k = k + 1
            end do
         end do
      end do
      lon_coord => horiz_coord_create('lon', 'ncol', ngcols_physgrid, &
           'longitude', 'degrees_east', 1, size(physgrid_coord), physgrid_coord, &
           map=physgrid_map)

      do ie = 1, nelemd
         k = 1
         do j = 1, fv_nphys
            do i = 1, fv_nphys
               mapind = k + ((ie - 1) * fv_nphys * fv_nphys)
               physgrid_coord(mapind) = fvm(ie)%center_cart_physgrid(i,j)%lat*rad2deg
               k = k + 1
            end do
         end do
      end do
      lat_coord => horiz_coord_create('lat', 'ncol', ngcols_physgrid, &
           'latitude', 'degrees_north', 1, size(physgrid_coord), physgrid_coord, &
           map=physgrid_map)

      ! Map for physics grid
      allocate(grid_map(3, ncols_physgrid))
      grid_map = 0_iMap
      mapind = 1
      do j = 1, nelemd
         do i = 1, fv_nphys*fv_nphys
            grid_map(1, mapind) = i
            grid_map(2, mapind) = j
            grid_map(3, mapind) = physgrid_map(mapind)
            mapind = mapind + 1
         end do
      end do

      ! create physics grid object
      call cam_grid_register('physgrid_d', physgrid_d, lat_coord, lon_coord, &
           grid_map, block_indexed=.false., unstruct=.true.)
      call cam_grid_attribute_register('physgrid_d', 'area_physgrid', 'physics grid areas', &
           'ncol', physgrid_area, map=physgrid_map)
      call cam_grid_attribute_register('physgrid_d', 'area_weight_physgrid', 'physics grid area weights', &
           'ncol', physgrid_areawt, map=physgrid_map)
      call cam_grid_attribute_register('physgrid_d', 'fv_nphys', '', fv_nphys)
      call cam_grid_attribute_register('physgrid_d', 'ne', '', ne)

      deallocate(physgrid_coord)
      deallocate(physgrid_map)
      nullify(physgrid_area)
      nullify(physgrid_areawt)
      nullify(grid_map)

   end if

   nullify(lat_coord) ! Belongs to grid
   nullify(lon_coord) ! Belongs to grid

end subroutine define_cam_grids

!========================================================================================

subroutine write_grid_mapping(par, elem)

   use parallel_mod,  only: parallel_t
   use cam_pio_utils, only: cam_pio_createfile, pio_subsystem
   use pio,           only: pio_def_dim, var_desc_t, pio_int, pio_def_var, &
                            pio_enddef, pio_closefile, pio_initdecomp, io_desc_t, &
                            pio_write_darray, pio_freedecomp
   use dof_mod,       only: createmetadata

   ! arguments
   type(parallel_t), intent(in) :: par
   type(element_t),  intent(in) :: elem(:)

   ! local variables
   integer, parameter :: npm12 = (np-1)*(np-1)

   type(file_desc_t) :: nc
   type(var_desc_t)  :: vid
   type(io_desc_t)   :: iodesc
   integer           :: dim1, dim2, ierr, i, j, ie, cc, base, ii, jj
   integer           :: subelement_corners(npm12*nelemd,4)
   integer           :: dof(npm12*nelemd*4)
   !----------------------------------------------------------------------------

   ! Create a CS grid mapping file for postprocessing tools

   ! write meta data for physics on GLL nodes
   call cam_pio_createfile(nc, 'SEMapping.nc', 0)

   ierr = pio_def_dim(nc, 'ncenters', npm12*nelem_d, dim1)
   ierr = pio_def_dim(nc, 'ncorners', 4, dim2)
   ierr = pio_def_var(nc, 'element_corners', PIO_INT, (/dim1,dim2/), vid)

   ierr = pio_enddef(nc)
   call createmetadata(par, elem, subelement_corners)

   jj = 0
   do cc = 0, 3
      do ie = 1, nelemd
         base = ((elem(ie)%globalid-1) + cc*nelem_d) * npm12
         ii = 0
         do j = 1, np-1
            do i = 1, np-1
               ii = ii + 1
               jj = jj + 1
               dof(jj) = base + ii
            end do
         end do
      end do
   end do
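   ! Each element contributes (np-1)**2 = npm12 subcell centers (9 when
   ! np = 4); corner set cc of the element with GlobalId g occupies the dof
   ! range ((g-1) + cc*nelem_d)*npm12 + 1 .. + npm12.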

   call pio_initdecomp(pio_subsystem, pio_int, (/nelem_d*npm12,4/), dof, iodesc)

   call pio_write_darray(nc, vid, iodesc, &
        reshape(subelement_corners, (/nelemd*npm12*4/)), ierr)

   call pio_freedecomp(nc, iodesc)

   call pio_closefile(nc)

end subroutine write_grid_mapping

!=========================================================================================

subroutine create_global_area(area_d)
   use dp_mapping, only: dp_reorder, dp_allocate, dp_deallocate

   ! Gather global array of column areas for the physics grid,
   ! reorder to global column order, then broadcast it to all tasks.

   ! Input variables
   real(r8), pointer :: area_d(:)

   ! Local variables
   real(r8)              :: areaw(np,np)
   real(r8), allocatable :: rbuf(:), dp_area(:,:)
   integer               :: rdispls(npes), recvcounts(npes)
   integer               :: ncol
   integer               :: ie, sb, eb, i, j, k
   integer               :: ierr
   integer               :: ibuf
   character(len=*), parameter :: sub = 'create_global_area'
   !----------------------------------------------------------------------------

   if (masterproc) then
      write(iulog, *) sub//': INFO: Non-scalable action: gathering global area in SE dycore.'
   end if

   if (fv_nphys > 0) then ! physics uses an FVM grid

      ! first gather all data onto masterproc, in mpi task order (via
      ! mpi_gatherv), then reorder into globalID order (via dp_reorder)
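      !
      ! The pattern, in outline:
      !   mpi_gatherv(local chunk -> rbuf)  ! task-ordered on masterproc
      !   dp_reorder(rbuf, area_d)          ! task order -> element GlobalId order
      !   mpi_bcast(area_d)                 ! give every task the global array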
      ncol = fv_nphys*fv_nphys*nelem_d
      allocate(rbuf(ncol))
      allocate(dp_area(fv_nphys*fv_nphys,nelem_d))

      do ie = 1, nelemd
         k = 1
         do j = 1, fv_nphys
            do i = 1, fv_nphys
               dp_area(k,ie) = fvm(ie)%area_sphere_physgrid(i,j)
               k = k + 1
            end do
         end do
      end do

      call mpi_gather(nelemd*fv_nphys*fv_nphys, 1, mpi_integer, recvcounts, 1, &
           mpi_integer, mstrid, mpicom, ierr)
      ! Figure global displacements
      if (masterproc) then
         rdispls(1) = 0
         do ie = 2, npes
            rdispls(ie) = rdispls(ie-1) + recvcounts(ie-1)
         end do
         ! Check to make sure we counted correctly
         if (rdispls(npes) + recvcounts(npes) /= ncol) then
            call endrun(sub//': bad rdispls array size')
         end if
      end if

      ! Gather up the areas onto the masterproc
      call mpi_gatherv(dp_area, fv_nphys*fv_nphys*nelemd, mpi_real8, rbuf, &
           recvcounts, rdispls, mpi_real8, mstrid, mpicom, ierr)

      ! Reorder to global order
      call dp_allocate(elem)
      if (masterproc) call dp_reorder(rbuf, area_d)
      call dp_deallocate()

      ! Send everyone else the data
      call mpi_bcast(area_d, ncol, mpi_real8, mstrid, mpicom, ierr)

      deallocate(dp_area)

   else ! physics is on the GLL grid

      allocate(rbuf(ngcols_d))
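      ! Each pass of this loop gathers one element's worth of unique columns
      ! from every task; tasks with fewer than nelemdmax elements contribute
      ! zero-sized slices on the extra passes.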
      do ie = 1, nelemdmax
         if (ie <= nelemd) then
            rdispls(iam+1)    = elem(ie)%idxp%UniquePtOffset - 1
            eb                = rdispls(iam+1) + elem(ie)%idxp%NumUniquePts
            recvcounts(iam+1) = elem(ie)%idxP%NumUniquePts
            areaw = 1.0_r8 / elem(ie)%rspheremp(:,:)
            call UniquePoints(elem(ie)%idxP, areaw, area_d(rdispls(iam+1)+1:eb))
         else
            rdispls(iam+1)    = 0
            recvcounts(iam+1) = 0
         end if

         ibuf = rdispls(iam+1)
         call mpi_allgather(ibuf, 1, mpi_integer, rdispls, &
              1, mpi_integer, mpicom, ierr)

         ibuf = recvcounts(iam+1)
         call mpi_allgather(ibuf, 1, mpi_integer, recvcounts, &
              1, mpi_integer, mpicom, ierr)

         sb = rdispls(iam+1) + 1
         eb = rdispls(iam+1) + recvcounts(iam+1)

         rbuf(1:recvcounts(iam+1)) = area_d(sb:eb)
         call mpi_allgatherv(rbuf, recvcounts(iam+1), mpi_real8, area_d, &
              recvcounts(:), rdispls(:), mpi_real8, mpicom, ierr)
      end do

   end if

   deallocate(rbuf)

end subroutine create_global_area

!=========================================================================================

subroutine create_global_coords(clat, clon, lat_out, lon_out)
   use dp_mapping, only: dp_reorder, dp_allocate, dp_deallocate

   ! Gather global arrays of column coordinates for the physics grid,
   ! reorder to global column order, then broadcast to all tasks.

   ! arguments
   real(r8),           intent(out) :: clat(:)
   real(r8),           intent(out) :: clon(:)
   real(r8), optional, intent(out) :: lat_out(:)
   real(r8), optional, intent(out) :: lon_out(:)

   ! Local variables
   real(r8), allocatable :: rbuf(:), dp_lon(:,:), dp_lat(:,:)
   integer               :: rdispls(npes), recvcounts(npes)
   integer               :: ie, sb, eb, i, j, k
   integer               :: ierr
   integer               :: ibuf
   integer               :: ncol
   character(len=*), parameter :: sub = 'create_global_coords'
   !----------------------------------------------------------------------------

   if (masterproc) then
      write(iulog, *) sub//': INFO: Non-scalable action: Creating global coords in SE dycore.'
   end if

   clat(:) = -iam
   clon(:) = -iam
   if (present(lon_out)) then
      lon_out(:) = -iam
   end if
   if (present(lat_out)) then
      lat_out(:) = -iam
   end if

   if (fv_nphys > 0) then ! physics uses an FVM grid

      ! first gather all data onto masterproc, in mpi task order (via
      ! mpi_gatherv), then reorder into globalID order (via dp_reorder)

      ncol = fv_nphys*fv_nphys*nelem_d
      allocate(rbuf(ncol))
      allocate(dp_lon(fv_nphys*fv_nphys,nelem_d))
      allocate(dp_lat(fv_nphys*fv_nphys,nelem_d))

      do ie = 1, nelemd
         k = 1
         do j = 1, fv_nphys
            do i = 1, fv_nphys
               dp_lon(k,ie) = fvm(ie)%center_cart_physgrid(i,j)%lon ! radians
               dp_lat(k,ie) = fvm(ie)%center_cart_physgrid(i,j)%lat
               k = k + 1
            end do
         end do
      end do

      call mpi_gather(nelemd*fv_nphys*fv_nphys, 1, mpi_integer, recvcounts, &
           1, mpi_integer, mstrid, mpicom, ierr)

      ! Figure global displacements
      if (masterproc) then
         rdispls(1) = 0
         do ie = 2, npes
            rdispls(ie) = rdispls(ie-1) + recvcounts(ie-1)
         end do
         ! Check to make sure we counted correctly
         if (rdispls(npes) + recvcounts(npes) /= ncol) then
            call endrun(sub//': bad rdispls array size')
         end if
      end if

      ! Gather up global latitudes
      call mpi_gatherv(dp_lat, fv_nphys*fv_nphys*nelemd, mpi_real8, rbuf, &
           recvcounts, rdispls, mpi_real8, mstrid, mpicom, ierr)

      ! Reorder to global order
      call dp_allocate(elem)
      if (masterproc) call dp_reorder(rbuf, clat)

      ! Send everyone else the data
      call mpi_bcast(clat, ncol, mpi_real8, mstrid, mpicom, ierr)

      ! Gather up global longitudes
      call mpi_gatherv(dp_lon, fv_nphys*fv_nphys*nelemd, mpi_real8, rbuf, &
           recvcounts, rdispls, mpi_real8, mstrid, mpicom, ierr)

      ! Reorder to global order
      if (masterproc) call dp_reorder(rbuf, clon)
      call dp_deallocate()

      ! Send everyone else the data
      call mpi_bcast(clon, ncol, mpi_real8, mstrid, mpicom, ierr)

      ! Create degree versions if requested
      if (present(lat_out)) then
         lat_out(:) = clat(:) * rad2deg
      end if
      if (present(lon_out)) then
         lon_out(:) = clon(:) * rad2deg
      end if

      deallocate(dp_lon)
      deallocate(dp_lat)

   else ! physics uses the GLL grid

      allocate(rbuf(ngcols_d))

      do ie = 1, nelemdmax

         if (ie <= nelemd) then
            rdispls(iam+1)    = elem(ie)%idxp%UniquePtOffset - 1
            eb                = rdispls(iam+1) + elem(ie)%idxp%NumUniquePts
            recvcounts(iam+1) = elem(ie)%idxP%NumUniquePts

            call UniqueCoords(elem(ie)%idxP, elem(ie)%spherep, &
                 clat(rdispls(iam+1)+1:eb), clon(rdispls(iam+1)+1:eb))

            if (present(lat_out)) then
               lat_out(rdispls(iam+1)+1:eb) = clat(rdispls(iam+1)+1:eb) * rad2deg
            end if

            if (present(lon_out)) then
               lon_out(rdispls(iam+1)+1:eb) = clon(rdispls(iam+1)+1:eb) * rad2deg
            end if

         else
            rdispls(iam+1)    = 0
            recvcounts(iam+1) = 0
         end if

         ibuf = rdispls(iam+1)
         call mpi_allgather(ibuf, 1, mpi_integer, rdispls, &
              1, mpi_integer, mpicom, ierr)

         ibuf = recvcounts(iam+1)
         call mpi_allgather(ibuf, 1, mpi_integer, recvcounts, &
              1, mpi_integer, mpicom, ierr)

         sb = rdispls(iam+1) + 1
         eb = rdispls(iam+1) + recvcounts(iam+1)

         ! the copy is zero-sized on tasks that have no element ie
         rbuf(1:recvcounts(iam+1)) = clat(sb:eb)
         call mpi_allgatherv(rbuf, recvcounts(iam+1), mpi_real8, clat, &
              recvcounts(:), rdispls(:), mpi_real8, mpicom, ierr)

         if (present(lat_out)) then
            rbuf(1:recvcounts(iam+1)) = lat_out(sb:eb)
            call mpi_allgatherv(rbuf, recvcounts(iam+1), mpi_real8, lat_out, &
                 recvcounts(:), rdispls(:), mpi_real8, mpicom, ierr)
         end if

         rbuf(1:recvcounts(iam+1)) = clon(sb:eb)
         call mpi_allgatherv(rbuf, recvcounts(iam+1), mpi_real8, clon, &
              recvcounts(:), rdispls(:), mpi_real8, mpicom, ierr)

         if (present(lon_out)) then
            rbuf(1:recvcounts(iam+1)) = lon_out(sb:eb)
            call mpi_allgatherv(rbuf, recvcounts(iam+1), mpi_real8, lon_out, &
                 recvcounts(:), rdispls(:), mpi_real8, mpicom, ierr)
         end if

      end do ! ie = 1, nelemdmax

   end if ! (fv_nphys > 0)

end subroutine create_global_coords

!=============================================================================
!==
!!!!!! DUMMY INTERFACE TO TEST WEAK SCALING FIX, THIS SHOULD GO AWAY
!==
!=============================================================================

subroutine get_horiz_grid_d(nxy, clat_d_out, clon_d_out, area_d_out, &
     wght_d_out, lat_d_out, lon_d_out)

   ! Return global arrays of latitude and longitude (in radians), column
   ! surface area (in radians squared) and surface integration weights for
   ! global column indices that will be passed to/from physics

   ! arguments
   integer, intent(in) :: nxy ! array sizes

   real(r8), intent(out),         optional :: clat_d_out(:) ! column latitudes
   real(r8), intent(out),         optional :: clon_d_out(:) ! column longitudes
   real(r8), intent(out), target, optional :: area_d_out(:) ! column surface areas

   real(r8), intent(out), target, optional :: wght_d_out(:) ! column integration weight
   real(r8), intent(out),         optional :: lat_d_out(:)  ! column degree latitudes
   real(r8), intent(out),         optional :: lon_d_out(:)  ! column degree longitudes
   character(len=*), parameter :: subname = 'get_horiz_grid_d'

   call endrun(subname//': NOT SUPPORTED WITH WEAK SCALING FIX')
end subroutine get_horiz_grid_d

!==============================================================================

function get_dyn_grid_parm_real1d(name) result(rval)

   ! This routine is not used for SE, but is still needed as a dummy interface
   ! to satisfy references from mo_synoz.F90.

   character(len=*), intent(in) :: name
   real(r8),         pointer    :: rval(:)

   if (name == 'w') then
      call endrun('get_dyn_grid_parm_real1d: w not defined')
   else if (name == 'clat') then
      call endrun('get_dyn_grid_parm_real1d: clat not supported')
   else if (name == 'latdeg') then
      call endrun('get_dyn_grid_parm_real1d: latdeg not defined')
   else
      nullify(rval)
   end if
end function get_dyn_grid_parm_real1d

!==============================================================================

end module dyn_grid