Line data Source code
1 : module dyn_grid
2 : !-------------------------------------------------------------------------------
3 : !
4 : ! Define SE computational grids on the dynamics decomposition.
5 : !
6 :
7 : ! The grid used by the SE dynamics is called the GLL grid. It is
8 : ! decomposed into elements which correspond to "blocks" in the
9 : ! physics/dynamics coupler terminology. The columns in this grid are
10 : ! located at the Gauss-Lobatto-Legendre (GLL) quadrature points. The GLL
11 : ! grid will also be used by the physics if the CSLAM advection is not used.
12 : ! If CSLAM is used for tracer advection, it runs on an FVM grid, and the
13 : ! physics will either use the same FVM grid or an FVM grid with a different
14 : ! number of equal-area subcells. The FVM grid used by the physics is
15 : ! referred to as the "physgrid".
16 : !
17 : ! Module responsibilities:
18 : !
19 : ! . Provide the physics/dynamics coupler (in module phys_grid) with data for the
20 : ! physics grid on the dynamics decomposition.
21 : !
22 : ! . Create CAM grid objects that are used by the I/O functionality to read
23 : ! data from an unstructured grid format to the dynamics data structures, and
24 : ! to write from the dynamics data structures to unstructured grid format. The
25 : ! global column ordering for the unstructured grid is determined by the SE dycore.
26 : !
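 : ! Illustrative sizes (editor's sketch; the ne, np, fv_nphys values below
 : ! are hypothetical): for a cubed sphere with ne=30 elements per cube edge
 : ! and np=4 GLL points per element edge, the GLL grid has
 : ! 6*ne**2*(np-1)**2 + 2 = 48602 unique columns, while a physgrid with
 : ! fv_nphys=3 has 6*ne**2*fv_nphys**2 = 48600 columns (FVM subcells do
 : ! not share points).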
27 : !-------------------------------------------------------------------------------
28 :
29 : use shr_kind_mod, only: r8 => shr_kind_r8, shr_kind_cl
30 : use spmd_utils, only: masterproc, iam, mpicom, mstrid=>masterprocid
31 : use spmd_utils, only: npes, mpi_integer, mpi_real8
32 : use constituents, only: pcnst
33 : use physconst, only: pi
34 : use cam_initfiles, only: initial_file_get_id
35 : use cam_grid_support, only: iMap
36 : use physics_column_type, only: physics_column_t
37 :
38 : use cam_logfile, only: iulog
39 : use cam_abortutils, only: endrun
40 :
41 : use pio, only: file_desc_t
42 :
43 : use dimensions_mod, only: globaluniquecols, nelem, nelemd, nelemdmax
44 : use dimensions_mod, only: ne, np, npsq, fv_nphys, nlev, use_cslam
45 : use element_mod, only: element_t
46 : use fvm_control_volume_mod, only: fvm_struct
47 : use hybvcoord_mod, only: hvcoord_t
48 : use prim_init, only: prim_init1
49 : use edge_mod, only: initEdgeBuffer
50 : use edgetype_mod, only: EdgeBuffer_t
51 : use se_dyn_time_mod, only: TimeLevel_t
52 : use dof_mod, only: UniqueCoords, UniquePoints
53 :
54 : implicit none
55 : private
56 : save
57 :
58 : integer, parameter :: dyn_decomp = 101 ! The SE dynamics grid
59 : integer, parameter :: fvm_decomp = 102 ! The FVM (CSLAM) grid
60 : integer, parameter :: physgrid_d = 103 ! physics grid on dynamics decomp
61 : integer, parameter :: ini_decomp = 104 ! alternate dynamics grid for reading initial file
62 : integer, parameter :: ini_decomp_scm = 205 ! alternate dynamics grid for reading initial file in single column (SCM) mode
63 : character(len=3), protected :: ini_grid_name
64 :
65 : ! Name of horizontal grid dimension in initial file.
66 : character(len=6), protected :: ini_grid_hdim_name = ''
67 :
68 : integer, parameter :: ptimelevels = 2
69 :
70 : type (TimeLevel_t) :: TimeLevel ! main time level struct (used by tracers)
71 : type (hvcoord_t) :: hvcoord
72 : type(element_t), pointer :: elem(:) => null() ! local GLL elements for this task
73 : type(fvm_struct), pointer :: fvm(:) => null() ! local FVM elements for this task
74 :
75 : public :: dyn_decomp
76 : public :: ini_grid_name
77 : public :: ini_grid_hdim_name
78 : public :: ptimelevels
79 : public :: TimeLevel
80 : public :: hvcoord
81 : public :: elem
82 : public :: fvm
83 : public :: edgebuf
84 :
85 : public :: dyn_grid_init ! Initialize the dynamics grid
86 : public :: get_dyn_grid_info ! Return physics grid column information
87 : public :: physgrid_copy_attributes_d ! Attributes to copy to physics grid
88 :
89 : !!XXgoldyXX: v These need to be removed to complete the weak scaling transition.
90 : public :: get_horiz_grid_d
91 : public :: get_horiz_grid_dim_d
92 : public :: get_dyn_grid_parm
93 : public :: get_dyn_grid_parm_real1d
94 : !!XXgoldyXX: ^ These need to be removed to complete the weak scaling transition.
95 : public :: dyn_grid_get_elem_coords ! get coords of a specified block element
96 :
97 : ! Note: dyn_grid_get_colndx is not implemented in SE
98 : public :: dyn_grid_get_colndx ! get element block/column and MPI process indices
99 :
100 : ! Namelist variables controlling grid writing.
101 : ! Read in dyn_readnl from dyn_se_inparm group.
102 : character(len=16), public :: se_write_grid_file = 'no'
103 : character(len=shr_kind_cl), public :: se_grid_filename = ''
104 : logical, public :: se_write_gll_corners = .false.
105 :
106 : type(physics_column_t), allocatable, target :: local_dyn_columns(:)
107 :
108 : ! number of global dynamics columns. Set by SE dycore init.
109 : integer :: ngcols_d = 0
110 : ! number of global elements. Set by SE dycore init.
111 : integer :: nelem_d = 0
112 :
113 : real(r8), parameter :: rad2deg = 180.0_r8/pi
114 :
115 : type(EdgeBuffer_t) :: edgebuf
116 :
117 : !=============================================================================
118 : contains
119 : !=============================================================================
120 :
121 1536 : subroutine dyn_grid_init()
122 :
123 : ! Initialize the SE grid and decomposition.
124 :
125 : use hycoef, only: hycoef_init, hypi, hypm, nprlev, &
126 : hyam, hybm, hyai, hybi, ps0
127 : use ref_pres, only: ref_pres_init
128 : use spmd_utils, only: MPI_MAX, MPI_INTEGER, mpicom
129 : use time_manager, only: get_nstep, get_step_size
130 : use dp_mapping, only: dp_init, dp_write
131 : use native_mapping, only: do_native_mapping, create_native_mapping_files
132 :
133 : use parallel_mod, only: par
134 : use hybrid_mod, only: hybrid_t, init_loop_ranges, &
135 : get_loop_ranges, config_thread_region
136 : use control_mod, only: qsplit, rsplit
137 : use se_dyn_time_mod, only: tstep, nsplit
138 : use fvm_mod, only: fvm_init2, fvm_init3, fvm_pg_init
139 : use dimensions_mod, only: irecons_tracer
140 : use comp_gll_ctr_vol, only: gll_grid_write
141 : use air_composition, only: thermodynamic_active_species_num
142 :
143 : ! Local variables
144 :
145 : type(file_desc_t), pointer :: fh_ini
146 :
147 : integer :: qsize_local
148 : integer :: k
149 :
150 : type(hybrid_t) :: hybrid
151 : integer :: ierr
152 : integer :: dtime
153 :
154 1536 : real(r8), allocatable :: clat(:), clon(:), areaa(:)
155 : integer :: nets, nete
156 :
157 : character(len=*), parameter :: sub = 'dyn_grid_init'
158 : !----------------------------------------------------------------------------
159 :
160 : ! Get file handle for initial file and first consistency check
161 1536 : fh_ini => initial_file_get_id()
162 :
163 : ! Initialize hybrid coordinate arrays
164 1536 : call hycoef_init(fh_ini, psdry=.true.)
165 :
166 144384 : hvcoord%hyam = hyam
167 145920 : hvcoord%hyai = hyai
168 144384 : hvcoord%hybm = hybm
169 145920 : hvcoord%hybi = hybi
170 1536 : hvcoord%ps0 = ps0
171 144384 : do k = 1, nlev
172 144384 : hvcoord%hybd(k) = hvcoord%hybi(k+1) - hvcoord%hybi(k)
173 : end do
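 : ! hybd(k) is thus the thickness of layer k in the hybrid B coefficient,
 : ! i.e. the difference between the interface values bracketing midpoint k.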
174 :
175 : ! Initialize reference pressures
176 1536 : call ref_pres_init(hypi, hypm, nprlev)
177 :
178 1536 : if (iam < par%nprocs) then
179 :
180 1536 : call prim_init1(elem, fvm, par, TimeLevel)
181 1536 : if (use_cslam) then
182 1536 : call dp_init(elem, fvm)
183 : end if
184 :
185 1536 : if (fv_nphys > 0) then
186 : qsize_local = 3
187 : else
188 0 : qsize_local = pcnst + 3
189 : end if
190 :
191 1536 : call initEdgeBuffer(par, edgebuf, elem, qsize_local*nlev, nthreads=1)
192 :
193 : else ! auxiliary processes
194 :
195 0 : globaluniquecols = 0
196 0 : nelem = 0
197 0 : nelemd = 0
198 0 : nelemdmax = 0
199 : endif
200 :
201 : ! nelemdmax is computed on the dycore comm; we need it globally.
202 1536 : ngcols_d = nelemdmax
203 1536 : call MPI_Allreduce(ngcols_d, nelemdmax, 1, MPI_INTEGER, MPI_MAX, mpicom, ierr)
204 : ! Not all PEs may have the correct global grid size
205 1536 : call MPI_Allreduce(globaluniquecols, ngcols_d, 1, MPI_INTEGER, MPI_MAX, mpicom, ierr)
206 : ! Not all PEs may have the correct number of elements
207 1536 : call MPI_Allreduce(nelem, nelem_d, 1, MPI_INTEGER, MPI_MAX, mpicom, ierr)
208 :
209 : ! nelemd (# of elements on this task) is set by prim_init1
210 1536 : call init_loop_ranges(nelemd)
211 :
212 : ! Dynamics timestep
213 : !
214 : ! Note: dtime = timestep for physics/dynamics coupling
215 : ! tstep = the dynamics timestep:
216 1536 : dtime = get_step_size()
217 1536 : tstep = dtime / real(nsplit*qsplit*rsplit, r8)
218 1536 : TimeLevel%nstep = get_nstep()*nsplit*qsplit*rsplit
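 : ! Worked example (hypothetical namelist values): with dtime = 1800 s,
 : ! nsplit=2, qsplit=1, rsplit=3, the dynamics runs at
 : ! tstep = 1800/(2*1*3) = 300 s, and TimeLevel%nstep advances by
 : ! nsplit*qsplit*rsplit = 6 per physics step.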
219 :
220 : ! initial SE (subcycled) nstep
221 1536 : TimeLevel%nstep0 = 0
222 :
223 : ! determine whether initial file uses 'ncol' or 'ncol_d'
224 1536 : call get_hdim_name(fh_ini, ini_grid_hdim_name)
225 :
226 : ! Define the dynamics and physics grids on the dynamics decomposition.
227 : ! Physics grid on the physics decomposition is defined in phys_grid_init.
228 1536 : call define_cam_grids()
229 :
230 1536 : if (fv_nphys > 0) then
231 :
232 : ! ================================================
233 : ! finish fvm initialization
234 : ! ================================================
235 :
236 1536 : if (iam < par%nprocs) then
237 1536 : hybrid = config_thread_region(par,'serial')
238 1536 : call get_loop_ranges(hybrid, ibeg=nets, iend=nete)
239 :
240 : ! initialize halo coordinate variables for cslam and physgrid
241 1536 : call fvm_init2(elem, fvm, hybrid, nets, nete)
242 1536 : call fvm_pg_init(elem, fvm, hybrid, nets, nete, irecons_tracer)
243 1536 : call fvm_init3(elem, fvm, hybrid, nets, nete, irecons_tracer)
244 : end if
245 :
246 : end if
247 :
248 : ! write grid and mapping files
249 1536 : if (se_write_gll_corners) then
250 0 : call write_grid_mapping(par, elem)
251 : end if
252 :
253 1536 : if (trim(se_write_grid_file) /= "no") then
254 0 : if (fv_nphys > 0) then
255 0 : call dp_write(elem, fvm, trim(se_write_grid_file), trim(se_grid_filename))
256 : else
257 0 : call gll_grid_write(elem, trim(se_write_grid_file), trim(se_grid_filename))
258 : end if
259 : end if
260 :
261 1536 : if (do_native_mapping) then
262 :
263 0 : allocate(areaA(ngcols_d))
264 0 : allocate(clat(ngcols_d),clon(ngcols_d))
265 0 : call get_horiz_grid_int(ngcols_d, clat_d_out=clat, clon_d_out=clon, area_d_out=areaA)
266 :
267 : ! Create mapping files using SE basis functions
268 0 : call create_native_mapping_files(par, elem, 'native', ngcols_d, clat, clon, areaa)
269 0 : call create_native_mapping_files(par, elem, 'bilin', ngcols_d, clat, clon, areaa)
270 :
271 0 : deallocate(areaa, clat, clon)
272 : end if
273 :
274 1536 : call mpi_barrier(mpicom, ierr)
275 :
276 1536 : end subroutine dyn_grid_init
277 :
278 : !==============================================================================
279 :
280 1536 : subroutine get_dyn_grid_info(hdim1_d, hdim2_d, num_lev, &
281 : index_model_top_layer, index_surface_layer, unstructured, dyn_columns)
282 : !------------------------------------------------------------
283 : !
284 : ! get_dyn_grid_info returns physics grid column information
285 : !
286 : !------------------------------------------------------------
287 1536 : use shr_const_mod, only: SHR_CONST_PI
288 : use cam_abortutils, only: endrun
289 : use spmd_utils, only: iam
290 : use coordinate_systems_mod, only: spherical_polar_t
291 : ! Dummy arguments
292 : integer, intent(out) :: hdim1_d ! # longitudes or grid size
293 : integer, intent(out) :: hdim2_d ! # latitudes or 1
294 : integer, intent(out) :: num_lev ! # levels
295 : integer, intent(out) :: index_model_top_layer
296 : integer, intent(out) :: index_surface_layer
297 : logical, intent(out) :: unstructured
298 : ! dyn_columns will contain a copy of the physics column info local to this
299 : ! dynamics task
300 : type(physics_column_t), allocatable, intent(out) :: dyn_columns(:)
301 : ! Local variables
302 : integer :: lindex
303 : integer :: gindex
304 : integer :: elem_ind, col_ind, ii, jj
305 : integer :: num_local_cols
306 : type(spherical_polar_t) :: coord
307 : real(r8) :: dcoord
308 : real(r8), parameter :: radtodeg = 180.0_r8 / SHR_CONST_PI
309 : real(r8), parameter :: degtorad = SHR_CONST_PI / 180.0_r8
310 : character(len=*), parameter :: subname = 'get_dyn_grid_info'
311 :
312 1536 : unstructured = .true. ! SE is an unstructured dycore
313 1536 : if (fv_nphys > 0) then ! physics uses an FVM grid
314 1536 : num_local_cols = nelemd * fv_nphys * fv_nphys
315 : else
316 0 : num_local_cols = 0
317 0 : do elem_ind = 1, nelemd
318 0 : num_local_cols = num_local_cols + elem(elem_ind)%idxP%NumUniquePts
319 : end do
320 : end if
321 1536 : if (allocated(local_dyn_columns)) then
322 : ! Check for correct number of columns
323 0 : if (size(local_dyn_columns) /= num_local_cols) then
324 0 : call endrun(subname//': called with inconsistent column numbers')
325 : end if
326 : else
327 104880 : allocate(local_dyn_columns(num_local_cols))
328 1536 : if (fv_nphys > 0) then ! physics uses an FVM grid
329 1536 : hdim1_d = nelem * fv_nphys * fv_nphys
330 : else
331 0 : hdim1_d = ngcols_d
332 : end if
333 1536 : hdim2_d = 1
334 1536 : num_lev = nlev
335 1536 : index_model_top_layer = 1
336 1536 : index_surface_layer = nlev
337 1536 : lindex = 0
338 12336 : do elem_ind = 1, nelemd
339 12336 : if (fv_nphys > 0) then ! physics uses an FVM grid
340 108000 : do col_ind = 0, (fv_nphys * fv_nphys) - 1
341 97200 : lindex = lindex + 1
342 97200 : ii = MOD(col_ind, fv_nphys) + 1
343 97200 : jj = (col_ind / fv_nphys) + 1
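 : ! (col_ind is unpacked row-major: e.g. with fv_nphys=3, col_ind=5
 : ! gives ii = MOD(5,3)+1 = 3 and jj = 5/3+1 = 2, i.e. column 3 of
 : ! subcell row 2.)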
344 97200 : coord = fvm(elem_ind)%center_cart_physgrid(ii, jj)
345 97200 : local_dyn_columns(lindex)%lat_rad = coord%lat
346 97200 : dcoord = local_dyn_columns(lindex)%lat_rad * radtodeg
347 97200 : local_dyn_columns(lindex)%lat_deg = dcoord
348 97200 : local_dyn_columns(lindex)%lon_rad = coord%lon
349 97200 : dcoord = local_dyn_columns(lindex)%lon_rad * radtodeg
350 97200 : local_dyn_columns(lindex)%lon_deg = dcoord
351 0 : local_dyn_columns(lindex)%area = &
352 97200 : fvm(elem_ind)%area_sphere_physgrid(ii,jj)
353 0 : local_dyn_columns(lindex)%weight = &
354 97200 : local_dyn_columns(lindex)%area
355 : ! File decomposition
356 0 : gindex = ((elem(elem_ind)%GlobalId-1) * fv_nphys * fv_nphys) + &
357 97200 : col_ind + 1
358 97200 : local_dyn_columns(lindex)%global_col_num = gindex
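 : ! (e.g. with fv_nphys=3 the element with GlobalId=2 owns global
 : ! columns 10..18, so col_ind=0 yields gindex = 9 + 0 + 1 = 10.)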
359 : ! Note, coord_indices not used for unstructured dycores
360 : ! Dynamics decomposition
361 97200 : local_dyn_columns(lindex)%dyn_task = iam
362 97200 : local_dyn_columns(lindex)%local_dyn_block = elem_ind
363 : local_dyn_columns(lindex)%global_dyn_block = &
364 97200 : elem(elem_ind)%GlobalId
365 97200 : allocate(local_dyn_columns(lindex)%dyn_block_index(1))
366 108000 : local_dyn_columns(lindex)%dyn_block_index(1) = col_ind + 1
367 : end do
368 : else
369 0 : do col_ind = 1, elem(elem_ind)%idxP%NumUniquePts
370 0 : lindex = lindex + 1
371 0 : ii = elem(elem_ind)%idxP%ia(col_ind)
372 0 : jj = elem(elem_ind)%idxP%ja(col_ind)
373 :
374 0 : dcoord = elem(elem_ind)%spherep(ii,jj)%lat
375 0 : local_dyn_columns(lindex)%lat_rad = dcoord
376 0 : dcoord = local_dyn_columns(lindex)%lat_rad * radtodeg
377 0 : local_dyn_columns(lindex)%lat_deg = dcoord
378 0 : dcoord = elem(elem_ind)%spherep(ii,jj)%lon
379 0 : local_dyn_columns(lindex)%lon_rad = dcoord
380 0 : dcoord = local_dyn_columns(lindex)%lon_rad * radtodeg
381 0 : local_dyn_columns(lindex)%lon_deg = dcoord
382 0 : local_dyn_columns(lindex)%area = &
383 0 : 1.0_r8 / elem(elem_ind)%rspheremp(ii,jj)
384 0 : local_dyn_columns(lindex)%weight = local_dyn_columns(lindex)%area
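 : ! (rspheremp holds the inverse of the assembled spherical mass matrix,
 : ! so 1/rspheremp is the area, in steradians, associated with this
 : ! GLL point; it doubles as the integration weight.)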
385 : ! File decomposition
386 0 : gindex = elem(elem_ind)%idxP%UniquePtoffset + col_ind - 1
387 0 : local_dyn_columns(lindex)%global_col_num = gindex
388 : ! Note, coord_indices not used for unstructured dycores
389 : ! Dynamics decomposition
390 0 : local_dyn_columns(lindex)%dyn_task = iam
391 0 : local_dyn_columns(lindex)%local_dyn_block = elem_ind
392 : local_dyn_columns(lindex)%global_dyn_block = &
393 0 : elem(elem_ind)%GlobalId
394 0 : allocate(local_dyn_columns(lindex)%dyn_block_index(1))
395 0 : local_dyn_columns(lindex)%dyn_block_index(1) = col_ind
396 : end do
397 : end if
398 : end do
399 : end if
400 : ! Copy the information to the output array
401 1536 : if (allocated(dyn_columns)) then
402 0 : deallocate(dyn_columns)
403 : end if
404 104880 : allocate(dyn_columns(lindex))
405 98736 : do lindex = 1, num_local_cols
406 98736 : dyn_columns(lindex) = local_dyn_columns(lindex)
407 : end do
408 :
409 1536 : end subroutine get_dyn_grid_info
410 :
411 : !==============================================================================
412 :
413 1536 : subroutine get_horiz_grid_dim_d(hdim1_d,hdim2_d)
414 :
415 : ! Returns declared horizontal dimensions of computational grid.
416 : ! For non-lon/lat grids, declare grid to be one-dimensional,
417 : ! i.e., (ngcols_d x 1)
418 :
419 : !------------------------------Arguments--------------------------------
420 : integer, intent(out) :: hdim1_d ! first horizontal dimension
421 : integer, intent(out), optional :: hdim2_d ! second horizontal dimension
422 : !-----------------------------------------------------------------------
423 :
424 1536 : if (fv_nphys > 0) then
425 1536 : hdim1_d = fv_nphys*fv_nphys*nelem_d
426 : else
427 0 : hdim1_d = ngcols_d
428 : end if
429 1536 : if (present(hdim2_d)) then
430 1536 : hdim2_d = 1
431 : end if
432 :
433 1536 : end subroutine get_horiz_grid_dim_d
434 :
435 : !=========================================================================================
436 :
437 0 : subroutine get_horiz_grid_int(nxy, clat_d_out, clon_d_out, area_d_out, &
438 0 : wght_d_out, lat_d_out, lon_d_out)
439 :
440 : ! Return global arrays of latitude and longitude (in radians), column
441 : ! surface area (in radians squared) and surface integration weights for
442 : ! global column indices that will be passed to/from physics
443 :
444 : ! arguments
445 : integer, intent(in) :: nxy ! array sizes
446 :
447 : real(r8), intent(out), optional :: clat_d_out(:) ! column latitudes
448 : real(r8), intent(out), optional :: clon_d_out(:) ! column longitudes
449 : real(r8), intent(out), target, optional :: area_d_out(:) ! column surface
450 :
451 : real(r8), intent(out), target, optional :: wght_d_out(:) ! column integration weight
452 : real(r8), intent(out), optional :: lat_d_out(:) ! column degree latitudes
453 : real(r8), intent(out), optional :: lon_d_out(:) ! column degree longitudes
454 :
455 : ! local variables
456 0 : real(r8), pointer :: area_d(:)
457 0 : real(r8), pointer :: temp(:)
458 : character(len=SHR_KIND_CL) :: errormsg
459 : character(len=*), parameter :: sub = 'get_horiz_grid_int'
460 : !----------------------------------------------------------------------------
461 :
462 : ! check that nxy is set to correct size for global arrays
463 0 : if (fv_nphys > 0) then
464 0 : if (nxy < fv_nphys*fv_nphys*nelem_d) then
465 0 : write(errormsg, *) sub//': arrays too small; Passed', &
466 0 : nxy, ', needs to be at least', fv_nphys*fv_nphys*nelem_d
467 0 : call endrun(errormsg)
468 : end if
469 : else
470 0 : if (nxy < ngcols_d) then
471 0 : write(errormsg,*) sub//': arrays not large enough; ', &
472 0 : 'Passed', nxy, ', needs to be at least', ngcols_d
473 0 : call endrun(errormsg)
474 : end if
475 : end if
476 :
477 0 : if ( present(area_d_out) ) then
478 0 : if (size(area_d_out) /= nxy) then
479 0 : call endrun(sub//': bad area_d_out array size')
480 : end if
481 0 : area_d => area_d_out
482 0 : call create_global_area(area_d)
483 :
484 0 : else if ( present(wght_d_out) ) then
485 0 : if (size(wght_d_out) /= nxy) then
486 0 : call endrun(sub//': bad wght_d_out array size')
487 : end if
488 0 : area_d => wght_d_out
489 0 : call create_global_area(area_d)
490 :
491 : end if
492 :
493 : ! If either area_d_out or wght_d_out was present, it was computed above.
494 : ! If both were present, copy the area values into the weights:
495 0 : if ( present(area_d_out) .and. present(wght_d_out) ) then
496 0 : wght_d_out(:) = area_d_out(:)
497 : end if
498 :
499 0 : if (present(clon_d_out)) then
500 0 : if (size(clon_d_out) /= nxy) then
501 0 : call endrun(sub//': bad clon_d_out array size in dyn_grid')
502 : end if
503 : end if
504 :
505 0 : if (present(clat_d_out)) then
506 :
507 0 : if (size(clat_d_out) /= nxy) then
508 0 : call endrun('bad clat_d_out array size in dyn_grid')
509 : end if
510 :
511 0 : if (present(clon_d_out)) then
512 0 : call create_global_coords(clat_d_out, clon_d_out, lat_d_out, lon_d_out)
513 : else
514 0 : allocate(temp(nxy))
515 0 : call create_global_coords(clat_d_out, temp, lat_d_out, lon_d_out)
516 0 : deallocate(temp)
517 : end if
518 :
519 0 : else if (present(clon_d_out)) then
520 :
521 0 : allocate(temp(nxy))
522 0 : call create_global_coords(temp, clon_d_out, lat_d_out, lon_d_out)
523 0 : deallocate(temp)
524 :
525 : end if
526 :
527 0 : end subroutine get_horiz_grid_int
528 :
529 : !=========================================================================================
530 :
531 1536 : subroutine physgrid_copy_attributes_d(gridname, grid_attribute_names)
532 :
533 : ! create list of attributes for the physics grid that should be copied
534 : ! from the corresponding grid object on the dynamics decomposition
535 :
536 : use cam_grid_support, only: max_hcoordname_len
537 :
538 : ! Dummy arguments
539 : character(len=max_hcoordname_len), intent(out) :: gridname
540 : character(len=max_hcoordname_len), pointer, intent(out) :: grid_attribute_names(:)
541 :
542 1536 : if (fv_nphys > 0) then
543 1536 : gridname = 'physgrid_d'
544 1536 : allocate(grid_attribute_names(2))
545 1536 : grid_attribute_names(1) = 'fv_nphys'
546 1536 : grid_attribute_names(2) = 'ne'
547 : else
548 0 : gridname = 'GLL'
549 0 : allocate(grid_attribute_names(2))
550 0 : grid_attribute_names(1) = 'np'
551 0 : grid_attribute_names(2) = 'ne'
552 : end if
553 :
554 1536 : end subroutine physgrid_copy_attributes_d
555 :
556 : !=========================================================================================
557 :
558 138240 : integer function get_dyn_grid_parm(name) result(ival)
559 :
560 : ! This function is in the process of being deprecated, but is still needed
561 : ! as a dummy interface to satisfy external references from some chemistry routines.
562 :
563 1536 : use pmgrid, only: plat, plev
564 :
565 : character(len=*), intent(in) :: name
566 : !----------------------------------------------------------------------------
567 :
568 138240 : if (name.eq.'plat') then
569 : ival = plat
570 69120 : else if(name.eq.'plon') then
571 69120 : if (fv_nphys>0) then
572 69120 : ival = fv_nphys*fv_nphys*nelem_d
573 : else
574 0 : ival = ngcols_d
575 : end if
576 0 : else if(name.eq.'plev') then
577 : ival = plev
578 :
579 : else
580 0 : ival = -1
581 : end if
582 :
583 138240 : end function get_dyn_grid_parm
584 :
585 : !=========================================================================================
586 :
587 0 : subroutine dyn_grid_get_colndx(igcol, ncols, owners, col, lbk)
588 :
589 : ! For each global column index return the owning task. If the column is owned
590 : ! by this task, then also return the local block number and column index in that
591 : ! block.
592 : !
593 : ! NOTE: this routine needs to be updated for the physgrid
594 :
595 : integer, intent(in) :: ncols
596 : integer, intent(in) :: igcol(ncols)
597 : integer, intent(out) :: owners(ncols)
598 : integer, intent(out) :: col(ncols)
599 : integer, intent(out) :: lbk(ncols)
600 :
601 : !----------------------------------------------------------------------------
602 :
603 0 : owners = (igcol * 0) -1 ! Kill compiler warnings
604 0 : col = -1 ! Kill compiler warnings
605 0 : lbk = -1 ! Kill compiler warnings
606 0 : call endrun('dyn_grid_get_colndx: not implemented for unstructured grids')
607 :
608 0 : end subroutine dyn_grid_get_colndx
609 :
610 : !=========================================================================================
611 :
612 0 : subroutine dyn_grid_get_elem_coords(ie, rlon, rlat, cdex)
613 :
614 : ! Returns coordinates of a specified block element of the dyn grid
615 : !
616 : ! NB: This routine only uses the GLL points (i.e., it ignores the physics
617 : ! grid). This is probably OK as current use is only for dyn_decomp
618 : ! variables in history.
619 :
620 : integer, intent(in) :: ie ! block element index
621 :
622 : real(r8),optional, intent(out) :: rlon(:) ! longitudes of the columns in the element
623 : real(r8),optional, intent(out) :: rlat(:) ! latitudes of the columns in the element
624 : integer, optional, intent(out) :: cdex(:) ! global column index
625 :
626 : integer :: sb,eb, ii, i,j, icol, igcol
627 0 : real(r8), allocatable :: clat(:), clon(:)
628 : !----------------------------------------------------------------------------
629 :
630 0 : if (fv_nphys > 0) then
631 0 : call endrun('dyn_grid_get_elem_coords: not implemented for the FVM physics grid')
632 : end if
633 :
634 0 : sb = elem(ie)%idxp%UniquePtOffset
635 0 : eb = sb + elem(ie)%idxp%NumUniquePts-1
636 :
637 0 : allocate( clat(sb:eb), clon(sb:eb) )
638 0 : call UniqueCoords( elem(ie)%idxP, elem(ie)%spherep, clat(sb:eb), clon(sb:eb) )
639 :
640 0 : if (present(cdex)) cdex(:) = -1
641 0 : if (present(rlat)) rlat(:) = -999._r8
642 0 : if (present(rlon)) rlon(:) = -999._r8
643 :
644 0 : do ii=1,elem(ie)%idxp%NumUniquePts
645 0 : i=elem(ie)%idxp%ia(ii)
646 0 : j=elem(ie)%idxp%ja(ii)
647 0 : icol = i+(j-1)*np
648 0 : igcol = elem(ie)%idxp%UniquePtoffset+ii-1
649 0 : if (present(cdex)) cdex(icol) = igcol
650 0 : if (present(rlat)) rlat(icol) = clat( igcol )
651 0 : if (present(rlon)) rlon(icol) = clon( igcol )
652 : end do
653 :
654 0 : deallocate( clat, clon )
655 :
656 0 : end subroutine dyn_grid_get_elem_coords
657 :
658 : !=========================================================================================
659 : ! Private routines.
660 : !=========================================================================================
661 :
662 1536 : subroutine get_hdim_name(fh_ptr, grid_hdim_name)
663 : use pio, only: pio_inq_dimid, pio_seterrorhandling
664 : use pio, only: PIO_BCAST_ERROR, PIO_NOERR
665 :
666 : ! Determine whether the supplied file uses 'ncol' or 'ncol_d' horizontal
667 : ! dimension in the unstructured grid. It is also possible when using
668 : ! analytic initial conditions that the file only contains
669 : ! vertical coordinates.
670 : ! Return 'none' if that is the case.
671 :
672 : ! Arguments
673 : type(file_desc_t), pointer :: fh_ptr
674 : character(len=6), intent(out) :: grid_hdim_name ! horizontal dimension name
675 :
676 : ! local variables
677 : integer :: ierr, pio_errtype
678 : integer :: ncol_did
679 :
680 : character(len=*), parameter :: sub = 'get_hdim_name'
681 : !----------------------------------------------------------------------------
682 :
683 : ! Set PIO to return error flags.
684 1536 : call pio_seterrorhandling(fh_ptr, PIO_BCAST_ERROR, pio_errtype)
685 :
686 : ! Check for ncol_d first just in case the file also contains fields on
687 : ! the physics grid.
688 1536 : ierr = pio_inq_dimid(fh_ptr, 'ncol_d', ncol_did)
689 1536 : if (ierr == PIO_NOERR) then
690 :
691 768 : grid_hdim_name = 'ncol_d'
692 :
693 : else
694 :
695 : ! if 'ncol_d' not in file, check for 'ncol'
696 768 : ierr = pio_inq_dimid(fh_ptr, 'ncol', ncol_did)
697 :
698 768 : if (ierr == PIO_NOERR) then
699 :
700 768 : grid_hdim_name = 'ncol'
701 :
702 : else
703 :
704 0 : grid_hdim_name = 'none'
705 :
706 : end if
707 : end if
708 :
709 : ! Return PIO to previous error handling.
710 1536 : call pio_seterrorhandling(fh_ptr, pio_errtype)
711 :
712 1536 : end subroutine get_hdim_name
713 :
714 1536 : subroutine define_cam_grids()
715 :
716 : ! Create grid objects on the dynamics decomposition for grids used by
717 : ! the dycore. The decomposed grid object contains data for the elements
718 : ! in each task and information to map that data to the global grid.
719 : !
720 : ! Notes on dynamic memory management:
721 : !
722 : ! . Coordinate values and the map passed to the horiz_coord_create
723 : ! method are copied to the object. The memory may be deallocated
724 : ! after the object is created.
725 : !
726 : ! . The area values passed to cam_grid_attribute_register are only pointed
727 : ! to by the attribute object, so that memory cannot be deallocated. But the
728 : ! map is copied.
729 : !
730 : ! . The grid_map passed to cam_grid_register is just pointed to.
731 : ! Cannot be deallocated.
732 :
733 : use cam_grid_support, only: horiz_coord_t, horiz_coord_create
734 : use cam_grid_support, only: cam_grid_register, cam_grid_attribute_register
735 : use dimensions_mod, only: nc
736 : use shr_const_mod, only: PI => SHR_CONST_PI
737 : use scamMod, only: closeioplon, closeioplat, closeioplonidx, single_column
738 : ! Local variables
739 : integer :: i, ii, j, k, ie, mapind
740 : character(len=8) :: latname, lonname, ncolname, areaname
741 :
742 : type(horiz_coord_t), pointer :: lat_coord
743 : type(horiz_coord_t), pointer :: lon_coord
744 1536 : integer(iMap), pointer :: grid_map(:,:)
745 1536 : integer(iMap), pointer :: grid_map_scm(:,:) !grid_map decomp for single column mode
746 :
747 1536 : real(r8), allocatable :: pelat_deg(:) ! pe-local latitudes (degrees)
748 1536 : real(r8), allocatable :: pelon_deg(:) ! pe-local longitudes (degrees)
749 1536 : real(r8), pointer :: pearea(:) ! pe-local areas
750 1536 : real(r8), pointer :: pearea_wt(:) ! pe-local areas normalized for unit sphere
751 3072 : integer(iMap) :: fdofP_local(npsq,nelemd) ! pe-local map for dynamics decomp
752 1536 : integer(iMap), allocatable :: pemap(:) ! pe-local map for PIO decomp
753 1536 : integer(iMap), allocatable :: pemap_scm(:) ! pe-local map for single column PIO decomp
754 : real(r8) :: latval(1),lonval(1)
755 :
756 : integer :: ncols_fvm, ngcols_fvm
757 1536 : real(r8), allocatable :: fvm_coord(:)
758 1536 : real(r8), pointer :: fvm_area(:)
759 1536 : real(r8), pointer :: fvm_areawt(:)
760 1536 : integer(iMap), pointer :: fvm_map(:)
761 :
762 : integer :: ncols_physgrid, ngcols_physgrid
763 1536 : real(r8), allocatable :: physgrid_coord(:)
764 1536 : real(r8), pointer :: physgrid_area(:)
765 1536 : real(r8), pointer :: physgrid_areawt(:)
766 1536 : integer(iMap), pointer :: physgrid_map(:)
767 :
768 : real(r8), parameter :: rarea_unit_sphere = 1.0_r8 / (4.0_r8*PI)
769 :
770 : !----------------------------------------------------------------------------
771 :
772 : !-----------------------
773 : ! initialize pointers to null
774 : !-----------------------
775 1536 : nullify(pearea_wt)
776 1536 : nullify(pearea)
777 1536 : nullify(fvm_area)
778 1536 : nullify(fvm_areawt)
779 1536 : nullify(fvm_map)
780 1536 : nullify(physgrid_area)
781 1536 : nullify(physgrid_areawt)
782 1536 : nullify(physgrid_map)
783 1536 : nullify(grid_map)
784 :
785 : !-----------------------
786 : ! Create GLL grid object
787 : !-----------------------
788 :
789 : ! Calculate the mapping between element GLL points and file order
790 185136 : fdofp_local = 0_iMap
791 12336 : do ie = 1, nelemd
792 109540 : do ii = 1, elem(ie)%idxP%NumUniquePts
793 97204 : i = elem(ie)%idxP%ia(ii)
794 97204 : j = elem(ie)%idxP%ja(ii)
795 108004 : fdofp_local((np*(j-1))+i,ie) = elem(ie)%idxP%UniquePtoffset + ii - 1
796 : end do
797 : end do
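 : ! (GLL points are stored row-major within an element: point (i,j)
 : ! occupies slot (np*(j-1))+i, e.g. (i=2,j=3) is slot 10 for np=4.
 : ! Slots whose unique owner is another element remain 0.)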
798 :
799 6144 : allocate(pelat_deg(np*np*nelemd))
800 3072 : allocate(pelon_deg(np*np*nelemd))
801 3072 : allocate(pearea(np*np*nelemd))
802 3072 : allocate(pearea_wt(np*np*nelemd))
803 3072 : allocate(pemap(np*np*nelemd))
804 :
805 174336 : pemap = 0_iMap
806 : ii = 1
807 12336 : do ie = 1, nelemd
808 183600 : pemap(ii:ii+npsq-1) = fdofp_local(:,ie)
809 55536 : do j = 1, np
810 226800 : do i = 1, np
811 172800 : pearea(ii) = elem(ie)%mp(i,j)*elem(ie)%metdet(i,j)
812 172800 : pearea_wt(ii) = pearea(ii)*rarea_unit_sphere
813 172800 : pelat_deg(ii) = elem(ie)%spherep(i,j)%lat * rad2deg
814 172800 : pelon_deg(ii) = elem(ie)%spherep(i,j)%lon * rad2deg
815 216000 : ii = ii + 1
816 : end do
817 : end do
818 : end do
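 : ! (pearea = mp*metdet is the GLL quadrature weight times the metric
 : ! Jacobian, i.e. the spherical area element at each GLL node; the
 : ! pearea_wt values therefore sum to 1 over the whole sphere.)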
819 :
820 : ! If using the physics grid then the GLL grid will use the names with
821 : ! '_d' suffixes and the physics grid will use the unadorned names.
822 : ! This allows fields on both the GLL and physics grids to be written to history
823 : ! output files.
824 1536 : if (trim(ini_grid_hdim_name) == 'ncol_d') then
825 768 : latname = 'lat_d'
826 768 : lonname = 'lon_d'
827 768 : ncolname = 'ncol_d'
828 768 : areaname = 'area_d'
829 : else
830 768 : latname = 'lat'
831 768 : lonname = 'lon'
832 768 : ncolname = 'ncol'
833 768 : areaname = 'area'
834 : end if
835 : lat_coord => horiz_coord_create('lat_d', 'ncol_d', ngcols_d, &
836 1536 : 'latitude', 'degrees_north', 1, size(pelat_deg), pelat_deg, map=pemap)
837 : lon_coord => horiz_coord_create('lon_d', 'ncol_d', ngcols_d, &
838 1536 : 'longitude', 'degrees_east', 1, size(pelon_deg), pelon_deg, map=pemap)
839 :
840 : ! Map for GLL grid
841 4608 : allocate(grid_map(3,npsq*nelemd))
842 692736 : grid_map = 0_iMap
843 : mapind = 1
844 12336 : do j = 1, nelemd
845 185136 : do i = 1, npsq
846 172800 : grid_map(1, mapind) = i
847 172800 : grid_map(2, mapind) = j
848 172800 : grid_map(3, mapind) = pemap(mapind)
849 183600 : mapind = mapind + 1
850 : end do
851 : end do
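 : ! (Each grid_map column is (point-in-element, local element, global
 : ! file column): e.g. the first point of local element 2 maps to
 : ! (1, 2, fdofp_local(1,2)).)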
852 :
853 : ! The native SE GLL grid
854 : call cam_grid_register('GLL', dyn_decomp, lat_coord, lon_coord, &
855 1536 : grid_map, block_indexed=.false., unstruct=.true.)
856 : call cam_grid_attribute_register('GLL', 'area_d', 'gll grid areas', &
857 1536 : 'ncol_d', pearea, map=pemap)
858 : call cam_grid_attribute_register('GLL', 'area_weight_gll', 'gll grid area weights', &
859 1536 : 'ncol_d', pearea_wt, map=pemap)
860 1536 : call cam_grid_attribute_register('GLL', 'np', '', np)
861 1536 : call cam_grid_attribute_register('GLL', 'ne', '', ne)
862 :
863 : ! If dim name is 'ncol', create INI grid
864 : ! We will read from INI grid, but use GLL grid for all output
865 1536 : if (trim(ini_grid_hdim_name) == 'ncol') then
866 : lat_coord => horiz_coord_create('lat', 'ncol', ngcols_d, &
867 768 : 'latitude', 'degrees_north', 1, size(pelat_deg), pelat_deg, map=pemap)
868 : lon_coord => horiz_coord_create('lon', 'ncol', ngcols_d, &
869 768 : 'longitude', 'degrees_east', 1, size(pelon_deg), pelon_deg, map=pemap)
870 :
871 : call cam_grid_register('INI', ini_decomp, lat_coord, lon_coord, &
872 768 : grid_map, block_indexed=.false., unstruct=.true.)
873 : call cam_grid_attribute_register('INI', 'area', 'ini grid areas', &
874 768 : 'ncol', pearea, map=pemap)
875 : call cam_grid_attribute_register('INI', 'area_weight_ini', 'ini grid area weights', &
876 768 : 'ncol', pearea_wt, map=pemap)
877 :
878 768 : ini_grid_name = 'INI'
879 : else
880 : ! The dyn_decomp grid can be used to read the initial file.
881 768 : ini_grid_name = 'GLL'
882 : end if
883 :
884 : ! Coordinate values and maps are copied into the coordinate and attribute objects.
885 : ! Locally allocated storage is no longer needed.
886 1536 : deallocate(pelat_deg)
887 1536 : deallocate(pelon_deg)
888 1536 : deallocate(pemap)
889 :
890 : ! pearea cannot be deallocated as the attribute object is just pointing
891 : ! to that memory. It can be nullified since the attribute object has
892 : ! the reference.
893 1536 : nullify(pearea)
894 1536 : nullify(pearea_wt)
895 :
896 : ! grid_map cannot be deallocated as the cam_filemap_t object just points
897 : ! to it. It can be nullified.
898 1536 : nullify(grid_map)
899 :
900 : !---------------------------------
901 : ! Create SCM grid object when running single column mode
902 : !---------------------------------
903 :
904 1536 : if (single_column) then
905 0 : allocate(pemap_scm(1))
906 0 : pemap_scm = 0_iMap
907 0 : pemap_scm = closeioplonidx
908 :
909 : ! Map for scm grid
910 0 : allocate(grid_map_scm(3,npsq))
911 0 : grid_map_scm = 0_iMap
912 : mapind = 1
913 0 : j = 1
914 0 : do i = 1, npsq
915 0 : grid_map_scm(1, mapind) = i
916 0 : grid_map_scm(2, mapind) = j
917 0 : grid_map_scm(3, mapind) = pemap_scm(1)
918 0 : mapind = mapind + 1
919 : end do
920 0 : latval=closeioplat
921 0 : lonval=closeioplon
922 :
923 : lat_coord => horiz_coord_create('lat', 'ncol', 1, &
924 0 : 'latitude', 'degrees_north', 1, 1, latval, map=pemap_scm)
925 : lon_coord => horiz_coord_create('lon', 'ncol', 1, &
926 0 : 'longitude', 'degrees_east', 1, 1, lonval, map=pemap_scm)
927 :
928 : call cam_grid_register('SCM', ini_decomp_scm, lat_coord, lon_coord, &
929 0 : grid_map_scm, block_indexed=.false., unstruct=.true.)
930 0 : deallocate(pemap_scm)
931 : ! grid_map cannot be deallocated as the cam_filemap_t object just points
932 : ! to it. It can be nullified.
933 0 : nullify(grid_map_scm)
934 : end if
935 :
936 : !---------------------------------
937 : ! Create FVM grid object for CSLAM
938 : !---------------------------------
939 :
940 1536 : if (use_cslam) then
941 :
942 1536 : ncols_fvm = nc * nc * nelemd
943 1536 : ngcols_fvm = nc * nc * nelem_d
944 4608 : allocate(fvm_coord(ncols_fvm))
945 3072 : allocate(fvm_map(ncols_fvm))
946 3072 : allocate(fvm_area(ncols_fvm))
947 3072 : allocate(fvm_areawt(ncols_fvm))
948 :
949 12336 : do ie = 1, nelemd
950 : k = 1
951 44736 : do j = 1, nc
952 140400 : do i = 1, nc
953 97200 : mapind = k + ((ie - 1) * nc * nc)
954 97200 : fvm_coord(mapind) = fvm(ie)%center_cart(i,j)%lon*rad2deg
955 97200 : fvm_map(mapind) = k + ((elem(ie)%GlobalId-1) * nc * nc)
956 97200 : fvm_area(mapind) = fvm(ie)%area_sphere(i,j)
957 97200 : fvm_areawt(mapind) = fvm_area(mapind)*rarea_unit_sphere
958 129600 : k = k + 1
959 : end do
960 : end do
961 : end do
962 : lon_coord => horiz_coord_create('lon_fvm', 'ncol_fvm', ngcols_fvm, &
963 : 'longitude', 'degrees_east', 1, size(fvm_coord), fvm_coord, &
964 1536 : map=fvm_map)
965 :
966 12336 : do ie = 1, nelemd
967 : k = 1
968 44736 : do j = 1, nc
969 140400 : do i = 1, nc
970 97200 : mapind = k + ((ie - 1) * nc * nc)
971 97200 : fvm_coord(mapind) = fvm(ie)%center_cart(i,j)%lat*rad2deg
972 129600 : k = k + 1
973 : end do
974 : end do
975 : end do
976 : lat_coord => horiz_coord_create('lat_fvm', 'ncol_fvm', ngcols_fvm, &
977 : 'latitude', 'degrees_north', 1, size(fvm_coord), fvm_coord, &
978 1536 : map=fvm_map)
979 :
980 : ! Map for FVM grid
981 4608 : allocate(grid_map(3, ncols_fvm))
982 390336 : grid_map = 0_iMap
983 1536 : mapind = 1
984 12336 : do j = 1, nelemd
985 109536 : do i = 1, nc*nc
986 97200 : grid_map(1, mapind) = i
987 97200 : grid_map(2, mapind) = j
988 97200 : grid_map(3, mapind) = fvm_map(mapind)
989 108000 : mapind = mapind + 1
990 : end do
991 : end do
992 :
993 : ! create FVM (CSLAM) grid object
994 : call cam_grid_register('FVM', fvm_decomp, lat_coord, lon_coord, &
995 1536 : grid_map, block_indexed=.false., unstruct=.true.)
996 : call cam_grid_attribute_register('FVM', 'area_fvm', 'fvm grid areas', &
997 1536 : 'ncol_fvm', fvm_area, map=fvm_map)
998 : call cam_grid_attribute_register('FVM', 'area_weight_fvm', 'fvm grid area weights', &
999 1536 : 'ncol_fvm', fvm_areawt, map=fvm_map)
1000 1536 : call cam_grid_attribute_register('FVM', 'nc', '', nc)
1001 1536 : call cam_grid_attribute_register('FVM', 'ne', '', ne)
1002 :
1003 1536 : deallocate(fvm_coord)
1004 1536 : deallocate(fvm_map)
1005 1536 : nullify(fvm_area)
1006 1536 : nullify(fvm_areawt)
1007 1536 : nullify(grid_map)
1008 :
1009 : end if
1010 :
1011 : !------------------------------------------------------------------
1012 : ! Create grid object for physics grid on the dynamics decomposition
1013 : !------------------------------------------------------------------
1014 :
1015 1536 : if (fv_nphys > 0) then
1016 :
1017 1536 : ncols_physgrid = fv_nphys * fv_nphys * nelemd
1018 1536 : ngcols_physgrid = fv_nphys * fv_nphys * nelem_d
1019 4608 : allocate(physgrid_coord(ncols_physgrid))
1020 3072 : allocate(physgrid_map(ncols_physgrid))
1021 3072 : allocate(physgrid_area(ncols_physgrid))
1022 3072 : allocate(physgrid_areawt(ncols_physgrid))
1023 :
1024 12336 : do ie = 1, nelemd
1025 : k = 1
1026 44736 : do j = 1, fv_nphys
1027 140400 : do i = 1, fv_nphys
1028 97200 : mapind = k + ((ie - 1) * fv_nphys * fv_nphys)
1029 97200 : physgrid_coord(mapind) = fvm(ie)%center_cart_physgrid(i,j)%lon*rad2deg
1030 97200 : physgrid_map(mapind) = k + ((elem(ie)%GlobalId-1) * fv_nphys * fv_nphys)
1031 97200 : physgrid_area(mapind) = fvm(ie)%area_sphere_physgrid(i,j)
1032 97200 : physgrid_areawt(mapind) = physgrid_area(mapind)*rarea_unit_sphere
1033 129600 : k = k + 1
1034 : end do
1035 : end do
1036 : end do
1037 : lon_coord => horiz_coord_create('lon', 'ncol', ngcols_physgrid, &
1038 : 'longitude', 'degrees_east', 1, size(physgrid_coord), physgrid_coord, &
1039 1536 : map=physgrid_map)
1040 :
1041 12336 : do ie = 1, nelemd
1042 10800 : k = 1
1043 44736 : do j = 1, fv_nphys
1044 140400 : do i = 1, fv_nphys
1045 97200 : mapind = k + ((ie - 1) * fv_nphys * fv_nphys)
1046 97200 : physgrid_coord(mapind) = fvm(ie)%center_cart_physgrid(i,j)%lat*rad2deg
1047 129600 : k = k + 1
1048 : end do
1049 : end do
1050 : end do
1051 : lat_coord => horiz_coord_create('lat', 'ncol', ngcols_physgrid, &
1052 : 'latitude', 'degrees_north', 1, size(physgrid_coord), physgrid_coord, &
1053 1536 : map=physgrid_map)
1054 :
1055 : ! Map for physics grid
1056 4608 : allocate(grid_map(3, ncols_physgrid))
1057 390336 : grid_map = 0_iMap
1058 1536 : mapind = 1
1059 12336 : do j = 1, nelemd
1060 109536 : do i = 1, fv_nphys*fv_nphys
1061 97200 : grid_map(1, mapind) = i
1062 97200 : grid_map(2, mapind) = j
1063 97200 : grid_map(3, mapind) = physgrid_map(mapind)
1064 108000 : mapind = mapind + 1
1065 : end do
1066 : end do
1067 :
1068 : ! create physics grid object
1069 : call cam_grid_register('physgrid_d', physgrid_d, lat_coord, lon_coord, &
1070 1536 : grid_map, block_indexed=.false., unstruct=.true.)
1071 : call cam_grid_attribute_register('physgrid_d', 'area_physgrid', 'physics grid areas', &
1072 1536 : 'ncol', physgrid_area, map=physgrid_map)
1073 : call cam_grid_attribute_register('physgrid_d', 'area_weight_physgrid', 'physics grid area weight', &
1074 1536 : 'ncol', physgrid_areawt, map=physgrid_map)
1075 1536 : call cam_grid_attribute_register('physgrid_d', 'fv_nphys', '', fv_nphys)
1076 1536 : call cam_grid_attribute_register('physgrid_d', 'ne', '', ne)
1077 :
1078 1536 : deallocate(physgrid_coord)
1079 1536 : deallocate(physgrid_map)
1080 1536 : nullify(physgrid_area)
1081 1536 : nullify(physgrid_areawt)
1082 1536 : nullify(grid_map)
1083 :
1084 : end if
1085 :
1086 1536 : nullify(lat_coord) ! Belongs to grid
1087 1536 : nullify(lon_coord) ! Belongs to grid
1088 :
1089 3072 : end subroutine define_cam_grids
1090 :
1091 : !========================================================================================
1092 :
1093 0 : subroutine write_grid_mapping(par, elem)
1094 :
1095 1536 : use parallel_mod, only: parallel_t
1096 : use cam_pio_utils, only: cam_pio_createfile, pio_subsystem
1097 : use pio, only: pio_def_dim, var_desc_t, pio_int, pio_def_var, &
1098 : pio_enddef, pio_closefile, pio_initdecomp, io_desc_t, &
1099 : pio_write_darray, pio_freedecomp
1100 : use dof_mod, only: createmetadata
1101 :
1102 : ! arguments
1103 : type(parallel_t), intent(in) :: par
1104 : type(element_t), intent(in) :: elem(:)
1105 :
1106 : ! local variables
1107 : integer, parameter :: npm12 = (np-1)*(np-1)
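 : ! (npm12 is the number of subcells per element: (np-1)**2 = 9 for
 : ! np=4; the mapping file stores the 4 corner GLL indices of each.)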
1108 :
1109 : type(file_desc_t) :: nc
1110 : type(var_desc_t) :: vid
1111 : type(io_desc_t) :: iodesc
1112 : integer :: dim1, dim2, ierr, i, j, ie, cc, base, ii, jj
1113 0 : integer :: subelement_corners(npm12*nelemd,4)
1114 0 : integer :: dof(npm12*nelemd*4)
1115 : !----------------------------------------------------------------------------
1116 :
1117 : ! Create a CS grid mapping file for postprocessing tools
1118 :
1119 : ! write meta data for physics on GLL nodes
1120 0 : call cam_pio_createfile(nc, 'SEMapping.nc', 0)
1121 :
1122 0 : ierr = pio_def_dim(nc, 'ncenters', npm12*nelem_d, dim1)
1123 0 : ierr = pio_def_dim(nc, 'ncorners', 4, dim2)
1124 0 : ierr = pio_def_var(nc, 'element_corners', PIO_INT, (/dim1,dim2/), vid)
1125 :
1126 0 : ierr = pio_enddef(nc)
1127 0 : call createmetadata(par, elem, subelement_corners)
1128 :
1129 0 : jj=0
1130 0 : do cc = 0, 3
1131 0 : do ie = 1, nelemd
1132 0 : base = ((elem(ie)%globalid-1)+cc*nelem_d)*npm12
1133 0 : ii=0
1134 0 : do j = 1, np-1
1135 0 : do i = 1, np-1
1136 0 : ii=ii+1
1137 0 : jj=jj+1
1138 0 : dof(jj) = base+ii
1139 : end do
1140 : end do
1141 : end do
1142 : end do
1143 :
1144 0 : call pio_initdecomp(pio_subsystem, pio_int, (/nelem_d*npm12,4/), dof, iodesc)
1145 :
1146 : call pio_write_darray(nc, vid, iodesc, &
1147 0 : reshape(subelement_corners, (/nelemd*npm12*4/)), ierr)
1148 :
1149 0 : call pio_freedecomp(nc, iodesc)
1150 :
1151 0 : call pio_closefile(nc)
1152 :
1153 0 : end subroutine write_grid_mapping
1154 :
1155 : !=========================================================================================
1156 :
1157 0 : subroutine create_global_area(area_d)
1158 0 : use dp_mapping, only: dp_reorder, dp_allocate, dp_deallocate
1159 :
1160 : ! Gather global array of column areas for the physics grid,
1161 : ! reorder to global column order, then broadcast it to all tasks.
1162 :
1163 : ! Input variables
1164 : real(r8), pointer :: area_d(:)
1165 :
1166 : ! Local variables
1167 : real(r8) :: areaw(np,np)
1168 0 : real(r8), allocatable :: rbuf(:), dp_area(:,:)
1169 0 : integer :: rdispls(npes), recvcounts(npes)
1170 : integer :: ncol
1171 : integer :: ie, sb, eb, i, j, k
1172 : integer :: ierr
1173 : integer :: ibuf
1174 : character(len=*), parameter :: sub = 'create_global_area'
1175 : !----------------------------------------------------------------------------
1176 :
1177 0 : if (masterproc) then
1178 0 : write(iulog, *) sub//': INFO: Non-scalable action: gathering global area in SE dycore.'
1179 : end if
1180 :
1181 0 : if (fv_nphys > 0) then ! physics uses an FVM grid
1182 :
1183 : ! first gather all data onto masterproc, in mpi task order (via
1184 : ! mpi_gatherv) then reorder into globalID order (via dp_reorder)
1185 0 : ncol = fv_nphys*fv_nphys*nelem_d
1186 0 : allocate(rbuf(ncol))
1187 0 : allocate(dp_area(fv_nphys*fv_nphys,nelem_d))
1188 :
1189 0 : do ie = 1, nelemd
1190 : k = 1
1191 0 : do j = 1, fv_nphys
1192 0 : do i = 1, fv_nphys
1193 0 : dp_area(k,ie) = fvm(ie)%area_sphere_physgrid(i,j)
1194 0 : k = k + 1
1195 : end do
1196 : end do
1197 : end do
1198 :
1199 : call mpi_gather(nelemd*fv_nphys*fv_nphys, 1, mpi_integer, recvcounts, 1, &
1200 0 : mpi_integer, mstrid, mpicom, ierr)
1201 : ! Figure global displacements
1202 0 : if (masterproc) then
1203 0 : rdispls(1) = 0
1204 0 : do ie = 2, npes
1205 0 : rdispls(ie) = rdispls(ie-1) + recvcounts(ie-1)
1206 : end do
1207 : ! Check to make sure we counted correctly
1208 0 : if (rdispls(npes) + recvcounts(npes) /= ncol) then
1209 0 : call endrun(sub//': bad rdispls array size')
1210 : end if
1211 : end if
1212 :
1213 : ! Gather up the areas onto the masterproc
1214 : call mpi_gatherv(dp_area, fv_nphys*fv_nphys*nelemd, mpi_real8, rbuf, &
1215 0 : recvcounts, rdispls, mpi_real8, mstrid, mpicom, ierr)
1216 :
1217 : ! Reorder to global order
1218 0 : call dp_allocate(elem)
1219 0 : if (masterproc) call dp_reorder(rbuf, area_d)
1220 0 : call dp_deallocate()
1221 :
1222 : ! Send everyone else the data
1223 0 : call mpi_bcast(area_d, ncol, mpi_real8, mstrid, mpicom, ierr)
1224 :
1225 0 : deallocate(dp_area)
1226 :
1227 : else ! physics is on the GLL grid
1228 :
1229 0 : allocate(rbuf(ngcols_d))
1230 0 : do ie = 1, nelemdmax
1231 0 : if (ie <= nelemd) then
1232 0 : rdispls(iam+1) = elem(ie)%idxp%UniquePtOffset - 1
1233 0 : eb = rdispls(iam+1) + elem(ie)%idxp%NumUniquePts
1234 0 : recvcounts(iam+1) = elem(ie)%idxP%NumUniquePts
1235 0 : areaw = 1.0_r8 / elem(ie)%rspheremp(:,:)
1236 0 : call UniquePoints(elem(ie)%idxP, areaw, area_d(rdispls(iam+1)+1:eb))
1237 : else
1238 0 : rdispls(iam+1) = 0
1239 0 : recvcounts(iam+1) = 0
1240 : end if
1241 :
1242 0 : ibuf = rdispls(iam+1)
1243 : call mpi_allgather(ibuf, 1, mpi_integer, rdispls, &
1244 0 : 1, mpi_integer, mpicom, ierr)
1245 :
1246 0 : ibuf = recvcounts(iam+1)
1247 : call mpi_allgather(ibuf, 1, mpi_integer, recvcounts, &
1248 0 : 1, mpi_integer, mpicom, ierr)
1249 :
1250 0 : sb = rdispls(iam+1) + 1
1251 0 : eb = rdispls(iam+1) + recvcounts(iam+1)
1252 :
1253 0 : rbuf(1:recvcounts(iam+1)) = area_d(sb:eb)
1254 : call mpi_allgatherv(rbuf, recvcounts(iam+1), mpi_real8, area_d, &
1255 0 : recvcounts(:), rdispls(:), mpi_real8, mpicom, ierr)
1256 : end do
1257 :
1258 : end if
1259 :
1260 0 : deallocate(rbuf)
1261 :
1262 0 : end subroutine create_global_area
1263 :
1264 : !=========================================================================================
1265 :
1266 0 : subroutine create_global_coords(clat, clon, lat_out, lon_out)
1267 : use dp_mapping, only: dp_reorder, dp_allocate, dp_deallocate
1268 :
1269 : ! Gather global arrays of column coordinates for the physics grid,
1270 : ! reorder to global column order, then broadcast to all tasks.
1271 :
1272 : ! arguments
1273 : real(r8), intent(out) :: clat(:)
1274 : real(r8), intent(out) :: clon(:)
1275 : real(r8), optional, intent(out) :: lat_out(:)
1276 : real(r8), optional, intent(out) :: lon_out(:)
1277 :
1278 : ! Local variables
1279 0 : real(r8), allocatable :: rbuf(:), dp_lon(:,:), dp_lat(:,:)
1280 0 : integer :: rdispls(npes), recvcounts(npes)
1281 : integer :: ie, sb, eb, i, j, k
1282 : integer :: ierr
1283 : integer :: ibuf
1284 : integer :: ncol
1285 : character(len=*), parameter :: sub='create_global_coords'
1286 : !----------------------------------------------------------------------------
1287 :
1288 0 : if (masterproc) then
1289 0 : write(iulog, *) sub//': INFO: Non-scalable action: Creating global coords in SE dycore.'
1290 : end if
1291 :
1292 0 : clat(:) = -iam
1293 0 : clon(:) = -iam
1294 0 : if (present(lon_out)) then
1295 0 : lon_out(:) = -iam
1296 : end if
1297 0 : if (present(lat_out)) then
1298 0 : lat_out(:) = -iam
1299 : end if
1300 :
1301 0 : if (fv_nphys > 0) then ! physics uses an FVM grid
1302 :
1303 : ! first gather all data onto masterproc, in mpi task order (via
1304 : ! mpi_gatherv) then reorder into globalID order (via dp_reorder)
1305 :
1306 0 : ncol = fv_nphys*fv_nphys*nelem_d
1307 0 : allocate(rbuf(ncol))
1308 0 : allocate(dp_lon(fv_nphys*fv_nphys,nelem_d))
1309 0 : allocate(dp_lat(fv_nphys*fv_nphys,nelem_d))
1310 :
1311 0 : do ie = 1, nelemd
1312 : k = 1
1313 0 : do j = 1, fv_nphys
1314 0 : do i = 1, fv_nphys
1315 0 : dp_lon(k,ie) = fvm(ie)%center_cart_physgrid(i,j)%lon ! radians
1316 0 : dp_lat(k,ie) = fvm(ie)%center_cart_physgrid(i,j)%lat
1317 0 : k = k + 1
1318 : end do
1319 : end do
1320 : end do
1321 :
1322 : call mpi_gather(nelemd*fv_nphys*fv_nphys, 1, mpi_integer, recvcounts, &
1323 0 : 1, mpi_integer, mstrid, mpicom, ierr)
1324 :
1325 : ! Figure global displacements
1326 0 : if (masterproc) then
1327 0 : rdispls(1) = 0
1328 0 : do ie = 2, npes
1329 0 : rdispls(ie) = rdispls(ie-1) + recvcounts(ie-1)
1330 : end do
1331 : ! Check to make sure we counted correctly
1332 0 : if (rdispls(npes) + recvcounts(npes) /= ncol) then
1333 0 : call endrun(sub//': bad rdispls array size')
1334 : end if
1335 : end if
1336 :
1337 : ! Gather up global latitudes
1338 : call mpi_gatherv(dp_lat, fv_nphys*fv_nphys*nelemd, mpi_real8, rbuf, &
1339 0 : recvcounts, rdispls, mpi_real8, mstrid, mpicom, ierr)
1340 :
1341 : ! Reorder to global order
1342 0 : call dp_allocate(elem)
1343 0 : if (masterproc) call dp_reorder(rbuf, clat)
1344 :
1345 : ! Send everyone else the data
1346 0 : call mpi_bcast(clat, ncol, mpi_real8, mstrid, mpicom, ierr)
1347 :
1348 : ! Gather up global longitudes
1349 : call mpi_gatherv(dp_lon, fv_nphys*fv_nphys*nelemd, mpi_real8, rbuf, &
1350 0 : recvcounts, rdispls, mpi_real8, mstrid, mpicom, ierr)
1351 :
1352 : ! Reorder to global order
1353 0 : if (masterproc) call dp_reorder(rbuf, clon)
1354 0 : call dp_deallocate()
1355 :
1356 : ! Send everyone else the data
1357 0 : call mpi_bcast(clon, ncol, mpi_real8, mstrid, mpicom, ierr)
1358 :
1359 : ! Create degree versions if requested
1360 0 : if (present(lat_out)) then
1361 0 : lat_out(:) = clat(:) * rad2deg
1362 : end if
1363 0 : if (present(lon_out)) then
1364 0 : lon_out(:) = clon(:) * rad2deg
1365 : end if
1366 :
1367 0 : deallocate(dp_lon)
1368 0 : deallocate(dp_lat)
1369 :
1370 : else ! physics uses the GLL grid
1371 :
1372 0 : allocate(rbuf(ngcols_d))
1373 :
1374 0 : do ie = 1, nelemdmax
1375 :
1376 0 : if(ie <= nelemd) then
1377 0 : rdispls(iam+1) = elem(ie)%idxp%UniquePtOffset - 1
1378 0 : eb = rdispls(iam+1) + elem(ie)%idxp%NumUniquePts
1379 0 : recvcounts(iam+1) = elem(ie)%idxP%NumUniquePts
1380 :
1381 0 : call UniqueCoords(elem(ie)%idxP, elem(ie)%spherep, &
1382 0 : clat(rdispls(iam+1)+1:eb), clon(rdispls(iam+1)+1:eb))
1383 :
1384 0 : if (present(lat_out)) then
1385 0 : lat_out(rdispls(iam+1)+1:eb) = clat(rdispls(iam+1)+1:eb) * rad2deg
1386 : end if
1387 :
1388 0 : if (present(lon_out)) then
1389 0 : lon_out(rdispls(iam+1)+1:eb) = clon(rdispls(iam+1)+1:eb) * rad2deg
1390 : end if
1391 :
1392 : else
1393 0 : rdispls(iam+1) = 0
1394 0 : recvcounts(iam+1) = 0
1395 : end if
1396 :
1397 0 : ibuf = rdispls(iam+1)
1398 : call mpi_allgather(ibuf, 1, mpi_integer, rdispls, &
1399 0 : 1, mpi_integer, mpicom, ierr)
1400 :
1401 0 : ibuf = recvcounts(iam+1)
1402 : call mpi_allgather(ibuf, 1, mpi_integer, recvcounts, &
1403 0 : 1, mpi_integer, mpicom, ierr)
1404 :
1405 0 : sb = rdispls(iam+1) + 1
1406 0 : eb = rdispls(iam+1) + recvcounts(iam+1)
1407 :
1408 0 : rbuf(1:recvcounts(iam+1)) = clat(sb:eb) ! zero-sized (valid) copy when recvcounts(iam+1) == 0
1409 : call mpi_allgatherv(rbuf, recvcounts(iam+1), mpi_real8, clat, &
1410 0 : recvcounts(:), rdispls(:), mpi_real8, mpicom, ierr)
1411 :
1412 0 : if (present(lat_out)) then
1413 0 : rbuf(1:recvcounts(iam+1)) = lat_out(sb:eb)
1414 : call mpi_allgatherv(rbuf, recvcounts(iam+1), mpi_real8, lat_out, &
1415 0 : recvcounts(:), rdispls(:), mpi_real8, mpicom, ierr)
1416 : end if
1417 :
1418 0 : rbuf(1:recvcounts(iam+1)) = clon(sb:eb)
1419 : call mpi_allgatherv(rbuf, recvcounts(iam+1), mpi_real8, clon, &
1420 0 : recvcounts(:), rdispls(:), mpi_real8, mpicom, ierr)
1421 :
1422 0 : if (present(lon_out)) then
1423 0 : rbuf(1:recvcounts(iam+1)) = lon_out(sb:eb)
1424 : call mpi_allgatherv(rbuf, recvcounts(iam+1), mpi_real8, lon_out, &
1425 0 : recvcounts(:), rdispls(:), mpi_real8, mpicom, ierr)
1426 : end if
1427 :
1428 : end do ! ie = 1, nelemdmax
1429 :
1430 : end if ! (fv_nphys > 0)
1431 :
1432 0 : end subroutine create_global_coords
1433 :
1434 : !=============================================================================
1435 : !==
1436 : !!!!!! DUMMY INTERFACE TO TEST WEAK SCALING FIX, THIS SHOULD GO AWAY
1437 : !==
1438 : !=============================================================================
1439 :
1440 0 : subroutine get_horiz_grid_d(nxy, clat_d_out, clon_d_out, area_d_out, &
1441 : wght_d_out, lat_d_out, lon_d_out)
1442 :
1443 : ! Return global arrays of latitude and longitude (in radians), column
1444 : ! surface area (in radians squared) and surface integration weights for
1445 : ! global column indices that will be passed to/from physics
1446 :
1447 : ! arguments
1448 : integer, intent(in) :: nxy ! array sizes
1449 :
1450 : real(r8), intent(out), optional :: clat_d_out(:) ! column latitudes
1451 : real(r8), intent(out), optional :: clon_d_out(:) ! column longitudes
1452 : real(r8), intent(out), target, optional :: area_d_out(:) ! column surface
1453 :
1454 : real(r8), intent(out), target, optional :: wght_d_out(:) ! column integration weight
1455 : real(r8), intent(out), optional :: lat_d_out(:) ! column degree latitudes
1456 : real(r8), intent(out), optional :: lon_d_out(:) ! column degree longitudes
1457 : character(len=*), parameter :: subname = 'get_horiz_grid_d'
1458 :
1459 0 : call endrun(subname//': NOT SUPPORTED WITH WEAK SCALING FIX')
1460 0 : end subroutine get_horiz_grid_d
1461 :
1462 : !==============================================================================
1463 :
1464 0 : function get_dyn_grid_parm_real1d(name) result(rval)
1465 :
1466 : ! This routine is not used for SE, but still needed as a dummy interface to satisfy
1467 : ! references from mo_synoz.F90
1468 :
1469 : character(len=*), intent(in) :: name
1470 : real(r8), pointer :: rval(:)
1471 :
1472 0 : if(name.eq.'w') then
1473 0 : call endrun('get_dyn_grid_parm_real1d: w not defined')
1474 0 : else if(name.eq.'clat') then
1475 0 : call endrun('get_dyn_grid_parm_real1d: clat not supported')
1476 0 : else if(name.eq.'latdeg') then
1477 0 : call endrun('get_dyn_grid_parm_real1d: latdeg not defined')
1478 : else
1479 0 : nullify(rval)
1480 : end if
1481 0 : end function get_dyn_grid_parm_real1d
1482 :
1483 : !==============================================================================
1484 :
1485 : end module dyn_grid
|