LCOV - code coverage report
Current view: top level - dynamics/fv - dyn_comp.F90 (source / functions)
Test: coverage.info
Date: 2025-03-14 01:33:33

              Hit    Total    Coverage
Lines:        793     1334      59.4 %
Functions:      8       13      61.5 %

          Line data    Source code
       1             : module dyn_comp
       2             : 
       3             : !----------------------------------------------------------------------
       4             : !   This module contains the interfaces for the Finite-Volume
        5             : !   Dynamical Core used in the Community Atmosphere Model
       6             : !   (FVCAM). This component will hereafter be referred
       7             : !   to as the ``FVdycore'' gridded component.  FVdycore
       8             : !   consists of four sub-components,
       9             : !
      10             : !      cd_core:  The C/D-grid dycore component
      11             : !      te_map:   Vertical remapping algorithm
      12             : !      trac2d:   Tracer advection
      13             : !      benergy:  Energy balance
      14             : !
      15             : !  FVdycore maintains an internal state consisting of the
      16             : !  following fields:  control variables
      17             : !
      18             : !     U:    U winds on a D-grid (m/s)
      19             : !     V:    V winds on a D-grid (m/s)
      20             : !     PT:   Scaled Virtual Potential Temperature (T_v/PKZ)
      21             : !     PE:   Edge pressures
      22             : !     Q:    Tracers
      23             : !     PKZ:  Consistent mean for p^kappa
      24             : !
       25             : !  as well as a GRID and some additional run-specific variables
      26             : !  (dt, iord, jord, nsplit, nspltrac, nspltvrm)
      27             : !
      28             : ! Note: PT is not updated if the flag CONVT is true.
      29             : !
      30             : ! The internal state is updated each time FVdycore is called.
      31             : !
      32             : ! REVISION HISTORY:
      33             : !
      34             : !   WS  05.06.10:  Adapted from FVdycore_GridCompMod
      35             : !   WS  05.09.20:  Renamed dyn_comp
      36             : !   WS  05.11.10:  Now using dyn_import/export_t containers
      37             : !   WS  06.03.01:  Removed tracertrans-related variables
      38             : !   WS  06.04.13:  dyn_state moved here from prognostics
      39             : !   CC  07.01.29:  Corrected calculation of OMGA
      40             : !   AM  07.10.31:  Supports overlap of trac2d and cd_core subcycles
      41             : !----------------------------------------------------------------------
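                      : !
                      : ! Editor's sketch: one large time step strings the four sub-components
                      : ! together roughly as follows (illustrative pseudocode only; the real
                      : ! calls live in dynpkg and carry many more arguments):
                      : !
                      : !    if (consv) call benergy(...)     ! energy balance before dynamics
                      : !    do n = 1, n2                     ! trac2d/cd_core subcycles
                      : !       do it = 1, nsplit             ! Lagrangian sub-steps
                      : !          call cd_core(...)          ! C/D-grid dycore
                      : !       end do
                      : !       call trac2d(...)              ! large-time-step tracer advection
                      : !    end do
                      : !    call te_map(...)                 ! vertical remapping
                      : !----------------------------------------------------------------------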
      42             : 
      43             : use shr_kind_mod,       only: r8=>shr_kind_r8
      44             : use spmd_utils,         only: masterproc, iam
      45             : use physconst,          only: pi
      46             : 
      47             : use pmgrid,             only: plon, plat
      48             : use constituents,       only: pcnst, cnst_name, cnst_read_iv, qmin, cnst_type, &
      49             :                               cnst_is_a_water_species
      50             : 
      51             : use time_manager,       only: get_step_size
      52             : 
      53             : use dynamics_vars,      only: t_fvdycore_grid,            &
      54             :                               t_fvdycore_state, t_fvdycore_constants
      55             : use dyn_internal_state, only: get_dyn_state, get_dyn_state_grid
      56             : 
      57             : use dyn_grid,           only: get_horiz_grid_dim_d
      58             : use commap,             only: clat, clon, clat_staggered, londeg_st
      59             : use spmd_dyn,           only: spmd_readnl
      60             : 
      61             : use inic_analytic,      only: analytic_ic_active, analytic_ic_set_ic
      62             : use dyn_tests_utils,    only: vc_moist_pressure
      63             : 
      64             : use cam_control_mod,    only: initial_run, moist_physics
      65             : use phys_control,       only: phys_setopts
      66             : 
      67             : use cam_initfiles,      only: initial_file_get_id, topo_file_get_id, pertlim
      68             : use cam_pio_utils,      only: clean_iodesc_list
      69             : use ncdio_atm,          only: infld
      70             : use pio,                only: pio_seterrorhandling, pio_bcast_error, pio_noerr, &
      71             :                               file_desc_t, pio_inq_dimid, pio_inq_dimlen,       &
      72             :                               pio_inq_varid, pio_get_att
      73             : 
      74             : use perf_mod,           only: t_startf, t_stopf, t_barrierf
      75             : use cam_logfile,        only: iulog
      76             : use cam_abortutils,     only: endrun
      77             : 
      78             : use par_vecsum_mod,     only: par_vecsum
      79             : use te_map_mod,         only: te_map
      80             : 
      81             : implicit none
      82             : private
      83             : save
      84             : 
      85             : public :: &
      86             :    dyn_readnl,   &
      87             :    dyn_register, &
      88             :    dyn_init,     &
      89             :    dyn_run,      &
      90             :    dyn_final,    &
      91             :    dyn_import_t, &
      92             :    dyn_export_t, &
      93             :    dyn_state,    &
      94             :    frontgf_idx,  &
      95             :    frontga_idx,  &
      96             :    uzm_idx,      &
      97             :    initial_mr
      98             : 
      99             : type (t_fvdycore_state), target :: dyn_state
     100             : 
     101             : type dyn_import_t
     102             :      real(r8), dimension(:,: ),    pointer :: phis   ! Surface geopotential
     103             :      real(r8), dimension(:,: ),    pointer :: ps     ! Surface pressure
     104             :      real(r8), dimension(:,:,:),   pointer :: u3s    ! U-winds (staggered)
     105             :      real(r8), dimension(:,:,:),   pointer :: v3s    ! V-winds (staggered)
     106             :      real(r8), dimension(:,:,:),   pointer :: pe     ! Pressure
     107             :      real(r8), dimension(:,:,:),   pointer :: pt     ! Potential temperature
     108             :      real(r8), dimension(:,:,:),   pointer :: t3     ! Temperatures
     109             :      real(r8), dimension(:,:,:),   pointer :: pk     ! Pressure to the kappa
     110             :      real(r8), dimension(:,:,:),   pointer :: pkz    ! Pressure to the kappa offset
     111             :      real(r8), dimension(:,:,:),   pointer :: delp   ! Delta pressure
     112             :      real(r8), dimension(:,:,:,:), pointer :: tracer ! Tracers
     113             : end type dyn_import_t
     114             : 
     115             : type dyn_export_t
     116             :      real(r8), dimension(:,: ),    pointer :: phis   ! Surface geopotential
     117             :      real(r8), dimension(:,: ),    pointer :: ps     ! Surface pressure
     118             :      real(r8), dimension(:,:,:),   pointer :: u3s    ! U-winds (staggered)
     119             :      real(r8), dimension(:,:,:),   pointer :: v3s    ! V-winds (staggered)
     120             :      real(r8), dimension(:,:,:),   pointer :: pe     ! Pressure
     121             :      real(r8), dimension(:,:,:),   pointer :: pt     ! Potential temperature
     122             :      real(r8), dimension(:,:,:),   pointer :: t3     ! Temperatures
     123             :      real(r8), dimension(:,:,:),   pointer :: pk     ! Pressure to the kappa
     124             :      real(r8), dimension(:,:,:),   pointer :: pkz    ! Pressure to the kappa offset
     125             :      real(r8), dimension(:,:,:),   pointer :: delp   ! Delta pressure
     126             :      real(r8), dimension(:,:,:,:), pointer :: tracer ! Tracers
     127             :      real(r8), dimension(:,:,:),   pointer :: peln   !
     128             :      real(r8), dimension(:,:,:),   pointer :: omga   ! Vertical velocity
     129             :      real(r8), dimension(:,:,:),   pointer :: mfx    ! Mass flux in X
     130             :      real(r8), dimension(:,:,:),   pointer :: mfy    ! Mass flux in Y
     131             :      real(r8), dimension(:,:,:),   pointer :: du3s   ! U-wind tend. from dycore (staggered)
     132             :      real(r8), dimension(:,:,:),   pointer :: dv3s   ! V-wind tend. from dycore (staggered)
     133             :      real(r8), dimension(:,:,:),   pointer :: dua3s  ! U-wind tend. from advection (stagg)
     134             :      real(r8), dimension(:,:,:),   pointer :: dva3s  ! V-wind tend. from advection (stagg)
     135             :      real(r8), dimension(:,:,:),   pointer :: duf3s  ! U-wind tend. from fixer (staggered)
     136             : end type dyn_export_t
     137             : 
     138             : ! The FV core is always called in its "full physics" mode.  We don't want
     139             : ! the dycore to know what physics package is responsible for the forcing.
     140             : logical, parameter         :: convt = .true.
     141             : 
     142             : ! Indices for fields that are computed in the dynamics and passed to the physics
     143             : ! via the physics buffer
     144             : integer, protected :: frontgf_idx  = -1
     145             : integer, protected :: frontga_idx  = -1
     146             : integer, protected :: uzm_idx = -1
     147             : 
     148             : character(len=3), protected :: initial_mr(pcnst) ! constituents initialized with wet or dry mr
     149             : 
     150             : logical :: readvar            ! inquiry flag:  true => variable exists on netCDF file
     151             : 
     152             : character(len=8)  :: fv_print_dpcoup_warn = "off"
     153             : public            :: fv_print_dpcoup_warn
     154             : 
     155             : !=============================================================================================
     156             : CONTAINS
     157             : !=============================================================================================
     158             : 
     159        1536 : subroutine dyn_readnl(nlfilename)
     160             : 
     161             :    ! Read dynamics namelist group.
     162             :    use units,           only: getunit, freeunit
     163             :    use namelist_utils,  only: find_group_name
     164             :    use spmd_utils,      only: mpicom, mstrid=>masterprocid, mpi_integer, mpi_real8, &
     165             :                               mpi_logical, mpi_character
     166             :    use ctem,            only: ctem_readnl
     167             :    use fill_module,     only: fill_readnl
     168             : 
     169             :    ! args
     170             :    character(len=*), intent(in) :: nlfilename
     171             : 
     172             :    ! Local variables
     173             :    integer :: ierr
     174             :    integer :: unitn
     175             :    character(len=*), parameter ::  sub="dyn_readnl"
     176             : 
     177             :    integer :: fv_nsplit      = 0       ! Lagrangian time splits
     178             :    integer :: fv_nspltrac    = 0       ! Tracer time splits
     179             :    integer :: fv_nspltvrm    = 0       ! Vertical re-mapping time splits
     180             :    integer :: fv_iord        = 4       ! scheme to be used in E-W direction
     181             :    integer :: fv_jord        = 4       ! scheme to be used in N-S direction
     182             :    integer :: fv_kord        = 4       ! scheme to be used for vertical mapping
     183             :                                        ! _ord = 1: first order upwind
     184             :                                        ! _ord = 2: 2nd order van Leer (Lin et al 1994)
     185             :                                        ! _ord = 3: standard PPM
     186             :                                        ! _ord = 4: enhanced PPM (default)
     187             :    logical :: fv_conserve    = .false. ! Flag indicating whether the dynamics is conservative
     188             :    integer :: fv_filtcw      = 0       ! flag for filtering c-grid winds
     189             :    integer :: fv_fft_flt     = 1       ! 0 => FFT/algebraic filter; 1 => FFT filter
     190             :    integer :: fv_div24del2flag = 2     ! 2 for 2nd order div damping, 4 for 4th order div damping,
     191             :                                        ! 42 for 4th order div damping plus 2nd order velocity damping
     192             :    real(r8):: fv_del2coef    = 3.e5_r8 ! strength of 2nd order velocity damping
     193             :    logical :: fv_high_altitude = .false. ! switch to apply variables appropriate for high-altitude physics
     194             : 
     195             :    logical :: fv_high_order_top=.false.! do not degrade calculation to 1st order near the model top
     196             : 
     197             :    logical :: fv_am_correction=.false. ! apply correction for angular momentum (AM) in SW eqns
     198             :    logical :: fv_am_geom_crrct=.false. ! apply correction for angular momentum (AM) in geometry
     199             :    logical :: fv_am_fixer     =.false. ! apply global fixer to conserve AM
     200             :    logical :: fv_am_fix_lbl   =.false. ! apply global AM fixer level by level
     201             :    logical :: fv_am_diag      =.false. ! turns on an AM diagnostic calculation written to log file
     202             : 
     203             :    namelist /dyn_fv_inparm/ fv_nsplit, fv_nspltrac, fv_nspltvrm, fv_iord, fv_jord, &
     204             :                             fv_kord, fv_conserve, fv_filtcw, fv_fft_flt,           &
     205             :                             fv_div24del2flag, fv_del2coef, fv_high_order_top,      &
     206             :                             fv_am_correction, fv_am_geom_crrct, &
     207             :                             fv_am_fixer, fv_am_fix_lbl, fv_am_diag, fv_high_altitude, &
     208             :                             fv_print_dpcoup_warn
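                      :
                      :    ! An illustrative dyn_fv_inparm group as it might appear in
                      :    ! nlfilename (example values only, not recommended settings):
                      :    !
                      :    !    &dyn_fv_inparm
                      :    !      fv_nsplit   = 8
                      :    !      fv_nspltrac = 2
                      :    !      fv_nspltvrm = 1
                      :    !      fv_iord     = 4
                      :    !      fv_jord     = 4
                      :    !      fv_kord     = 4
                      :    !    /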
     209             : 
     210             :    type(t_fvdycore_state), pointer :: dyn_state
     211             : 
     212             :    real(r8) :: dt
     213             :    !-----------------------------------------------------------------------------
     214             : 
     215        1536 :    if (masterproc) then
     216           2 :       write(iulog,*) 'Read in dyn_fv_inparm namelist from: ', trim(nlfilename)
     217           2 :       unitn = getunit()
     218           2 :       open( unitn, file=trim(nlfilename), status='old' )
     219             : 
     220             :       ! Look for dyn_fv_inparm group name in the input file.  If found, leave the
     221             :       ! file positioned at that namelist group.
     222           2 :       call find_group_name(unitn, 'dyn_fv_inparm', status=ierr)
     223           2 :       if (ierr == 0) then  ! found dyn_fv_inparm
     224           2 :          read(unitn, dyn_fv_inparm, iostat=ierr)  ! read the dyn_fv_inparm namelist group
     225           2 :          if (ierr /= 0) then
     226           0 :             call endrun(sub//': ERROR reading dyn_fv_inparm')
     227             :          end if
     228             :       else
     229           0 :          call endrun(sub//': can''t find dyn_fv_inparm in file '//trim(nlfilename))
     230             :       end if
     231           2 :       close( unitn )
     232           2 :       call freeunit( unitn )
     233             :    endif
     234             : 
     235        1536 :    call mpi_bcast(fv_nsplit, 1, mpi_integer, mstrid, mpicom, ierr)
     236        1536 :    if (ierr /= 0) call endrun(sub//": FATAL: mpi_bcast: fv_nsplit")
     237             : 
     238        1536 :    call mpi_bcast(fv_nspltrac, 1, mpi_integer, mstrid, mpicom, ierr)
     239        1536 :    if (ierr /= 0) call endrun(sub//": FATAL: mpi_bcast: fv_nspltrac")
     240             : 
     241        1536 :    call mpi_bcast(fv_nspltvrm, 1, mpi_integer, mstrid, mpicom, ierr)
     242        1536 :    if (ierr /= 0) call endrun(sub//": FATAL: mpi_bcast: fv_nspltvrm")
     243             : 
     244        1536 :    call mpi_bcast(fv_iord, 1, mpi_integer, mstrid, mpicom, ierr)
     245        1536 :    if (ierr /= 0) call endrun(sub//": FATAL: mpi_bcast: fv_iord")
     246             : 
     247        1536 :    call mpi_bcast(fv_jord, 1, mpi_integer, mstrid, mpicom, ierr)
     248        1536 :    if (ierr /= 0) call endrun(sub//": FATAL: mpi_bcast: fv_jord")
     249             : 
     250        1536 :    call mpi_bcast(fv_kord, 1, mpi_integer, mstrid, mpicom, ierr)
     251        1536 :    if (ierr /= 0) call endrun(sub//": FATAL: mpi_bcast: fv_kord")
     252             : 
     253        1536 :    call mpi_bcast(fv_conserve, 1, mpi_logical, mstrid, mpicom, ierr)
     254        1536 :    if (ierr /= 0) call endrun(sub//": FATAL: mpi_bcast: fv_conserve")
     255             : 
     256        1536 :    call mpi_bcast(fv_filtcw, 1, mpi_integer, mstrid, mpicom, ierr)
     257        1536 :    if (ierr /= 0) call endrun(sub//": FATAL: mpi_bcast: fv_filtcw")
     258             : 
     259        1536 :    call mpi_bcast(fv_fft_flt, 1, mpi_integer, mstrid, mpicom, ierr)
     260        1536 :    if (ierr /= 0) call endrun(sub//": FATAL: mpi_bcast: fv_fft_flt")
     261             : 
     262        1536 :    call mpi_bcast(fv_div24del2flag, 1, mpi_integer, mstrid, mpicom, ierr)
     263        1536 :    if (ierr /= 0) call endrun(sub//": FATAL: mpi_bcast: fv_div24del2flag")
     264             : 
     265        1536 :    call mpi_bcast(fv_del2coef, 1, mpi_real8, mstrid, mpicom, ierr)
     266        1536 :    if (ierr /= 0) call endrun(sub//": FATAL: mpi_bcast: fv_del2coef")
     267             : 
     268        1536 :    call mpi_bcast(fv_high_order_top, 1, mpi_logical, mstrid, mpicom, ierr)
     269        1536 :    if (ierr /= 0) call endrun(sub//": FATAL: mpi_bcast: fv_high_order_top")
     270             : 
     271        1536 :    call mpi_bcast(fv_am_correction, 1, mpi_logical, mstrid, mpicom, ierr)
     272        1536 :    if (ierr /= 0) call endrun(sub//": FATAL: mpi_bcast: fv_am_correction")
     273             : 
     274        1536 :    call mpi_bcast(fv_am_geom_crrct, 1, mpi_logical, mstrid, mpicom, ierr)
     275        1536 :    if (ierr /= 0) call endrun(sub//": FATAL: mpi_bcast: fv_am_geom_crrct")
     276             : 
     277             :    ! if fv_am_fix_lbl is true then fv_am_fixer must also be true.
     278        1536 :    if (fv_am_fix_lbl .and. .not. fv_am_fixer) then
     279           0 :       fv_am_fixer = .true.
     280             :    end if
     281             : 
     282        1536 :    call mpi_bcast(fv_am_fixer, 1, mpi_logical, mstrid, mpicom, ierr)
     283        1536 :    if (ierr /= 0) call endrun(sub//": FATAL: mpi_bcast: fv_am_fixer")
     284             : 
     285        1536 :    call mpi_bcast(fv_am_fix_lbl, 1, mpi_logical, mstrid, mpicom, ierr)
     286        1536 :    if (ierr /= 0) call endrun(sub//": FATAL: mpi_bcast: fv_am_fix_lbl")
     287             : 
     288        1536 :    call mpi_bcast(fv_am_diag, 1, mpi_logical, mstrid, mpicom, ierr)
     289        1536 :    if (ierr /= 0) call endrun(sub//": FATAL: mpi_bcast: fv_am_diag")
     290             : 
     291        1536 :    call mpi_bcast(fv_high_altitude, 1, mpi_logical, mstrid, mpicom, ierr)
     292        1536 :    if (ierr /= 0) call endrun(sub//": FATAL: mpi_bcast: fv_high_altitude")
     293             : 
     294        1536 :    call mpi_bcast(fv_print_dpcoup_warn, len(fv_print_dpcoup_warn), mpi_character, mstrid, mpicom, ierr)
     295        1536 :    if (ierr /= 0) call endrun(sub//": FATAL: mpi_bcast: fv_print_dpcoup_warn")
     296             : 
     297             :    ! Store namelist settings in fv state object
     298        1536 :    dyn_state => get_dyn_state()
     299             : 
     300        1536 :    dyn_state%grid%high_alt = fv_high_altitude
     301             : 
     302             :    ! Calculate nsplit if it was specified as 0
     303        1536 :    if ( fv_nsplit <= 0 ) then
     304           0 :       dt = get_step_size()
     305           0 :       dyn_state%nsplit= init_nsplit(dt, plon, plat)
     306             :    else
     307        1536 :       dyn_state%nsplit= fv_nsplit
     308             :    end if
     309             : 
     310             :    ! Calculate nspltrac if it was specified as 0
     311        1536 :    if (fv_nspltrac <= 0) then
     312           0 :       dyn_state%nspltrac = max (1, dyn_state%nsplit/4)
     313             :    else
     314        1536 :       dyn_state%nspltrac = fv_nspltrac
     315             :    end if
     316             : 
     317             :    ! Set nspltvrm to 1 if it was specified as 0
     318        1536 :    if (fv_nspltvrm <= 0) then
     319           0 :       dyn_state%nspltvrm = 1
     320             :    else
     321        1536 :       dyn_state%nspltvrm = fv_nspltvrm
     322             :    end if
     323             : 
     324        1536 :    dyn_state%iord = fv_iord
     325        1536 :    dyn_state%jord = fv_jord
     326        1536 :    dyn_state%kord = fv_kord
     327             : 
     328             :    ! Calculation of orders for the C grid is fixed by D-grid IORD, JORD
     329        1536 :    if( fv_iord <= 2 ) then
     330           0 :       dyn_state%icd =  1
     331             :    else
     332        1536 :       dyn_state%icd = -2
     333             :    end if
     334             : 
     335        1536 :    if( fv_jord <= 2 ) then
     336           0 :       dyn_state%jcd =  1
     337             :    else
     338        1536 :       dyn_state%jcd =  -2
     339             :    end if
     340             : 
     341        1536 :    dyn_state%consv         = fv_conserve
     342        1536 :    dyn_state%filtcw        = fv_filtcw
     343        1536 :    dyn_state%fft_flt       = fv_fft_flt
     344        1536 :    dyn_state%div24del2flag = fv_div24del2flag
     345        1536 :    dyn_state%del2coef      = fv_del2coef
     346             : 
     347        1536 :    dyn_state%high_order_top= fv_high_order_top  
     348        1536 :    dyn_state%am_correction = fv_am_correction
     349        1536 :    dyn_state%am_geom_crrct = fv_am_geom_crrct
     350        1536 :    dyn_state%am_fixer      = fv_am_fixer
     351        1536 :    dyn_state%am_fix_lbl    = fv_am_fix_lbl
     352        1536 :    dyn_state%am_diag       = fv_am_diag
     353             : 
     354             : 
      355             :    ! There is a modification for the AM correction in the vertical diffusion code.  Use the
      356             :    ! physics control module to communicate whether the correction is to be applied there.
     357        1536 :    call phys_setopts(fv_am_correction_in=fv_am_correction)
     358             : 
     359        1536 :    if (masterproc) then
     360           2 :       write(iulog,*)'FV dycore configuration:'
     361           2 :       write(iulog,*)'  Lagrangian time splits (fv_nsplit)                = ', fv_nsplit
      362           2 :       write(iulog,*)'  Tracer time splits (fv_nspltrac)                  = ', fv_nspltrac
     363           2 :       write(iulog,*)'  Vertical re-mapping time splits (fv_nspltvrm)     = ', fv_nspltvrm
     364           2 :       write(iulog,*)'  Scheme in E-W direction (fv_iord)                 = ', fv_iord
     365           2 :       write(iulog,*)'  Scheme in N-S direction (fv_jord)                 = ', fv_jord
     366           2 :       write(iulog,*)'  Scheme for vertical mapping (fv_kord)             = ', fv_kord
     367           2 :       write(iulog,*)'  Conservative dynamics (fv_conserve)               = ', fv_conserve
      368           2 :       write(iulog,*)'  Filtering c-grid winds (fv_filtcw)                = ', fv_filtcw
     369           2 :       write(iulog,*)'  FFT filter (fv_fft_flt)                           = ', fv_fft_flt
     370           2 :       write(iulog,*)'  Divergence/velocity damping (fv_div24del2flag)    = ', fv_div24del2flag
     371           2 :       write(iulog,*)'  Coef for 2nd order velocity damping (fv_del2coef) = ', fv_del2coef
      372           2 :       write(iulog,*)'  High-order top (fv_high_order_top)                = ', fv_high_order_top
     373           2 :       write(iulog,*)'  Geometry & pressure corr. for AM (fv_am_geom_crrct) = ', fv_am_geom_crrct
     374           2 :       write(iulog,*)'  Angular momentum (AM) correction (fv_am_correction) = ', fv_am_correction
     375           2 :       write(iulog,*)'  Apply AM fixer (fv_am_fixer)                        = ', fv_am_fixer
     376           2 :       write(iulog,*)'  Level by level AM fixer (fv_am_fix_lbl)             = ', fv_am_fix_lbl
     377           2 :       write(iulog,*)'  Enable AM diagnostics (fv_am_diag)                  = ', fv_am_diag
     378           2 :       write(iulog,*)'  '
     379             :    end if
     380             : 
     381        1536 :    call spmd_readnl(nlfilename)
     382             : 
     383        1536 :    call ctem_readnl(nlfilename)
     384        1536 :    call fill_readnl(nlfilename)
     385             : 
     386             :    !---------------------------------------------------------------------------
     387             :    contains
     388             :    !---------------------------------------------------------------------------
     389             : 
     390           0 :    integer function init_nsplit(dtime, im, jm)
     391             : 
     392             :       !-----------------------------------------------------------------------
     393             :       ! find proper value for nsplit if not specified
     394             :       !
      395             :       ! If fv_nsplit=0 then determine a good value
     396             :       ! for ns (used in dynpkg) based on resolution and the large-time-step
     397             :       ! (pdt). The user may have to set this manually if instability occurs.
     398             :       !
     399             :       ! REVISION HISTORY:
     400             :       !   00.10.19   Lin     Creation
     401             :       !   01.06.10   Sawyer  Modified for dynamics_init framework
     402             :       !   03.12.04   Sawyer  Moved here from dynamics_vars.  Now a function
     403             :       !-----------------------------------------------------------------------
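                      :
                      :       ! Worked example (illustrative resolution, not taken from this run):
                      :       ! on a 0.9x1.25 degree grid (im=288, jm=192) with dtime=1800 s,
                      :       !    dim = max(288, 2*(192-1)) = 382
                      :       !    ns  = int(4 * 1800 * 382 / (1800 * 191) + 0.75) = int(8.75) = 8
                      :       ! so the large time step is split into 8 Lagrangian sub-steps.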
     404             : 
     405             :       ! arguments
     406             :       real (r8), intent(in) :: dtime      !  time step
     407             :       integer,   intent(in) :: im, jm     !  Global horizontal resolution
     408             : 
     409             :       ! LOCAL VARIABLES:
     410             :       real (r8)   pdt                       ! Time-step in seconds
     411             :       real (r8)   dim
     412             :       real (r8)   dim0                      ! base dimension
     413             :       real (r8)   dt0                       ! base time step
     414             :       real (r8)   ns0                       ! base nsplit for base dimension
     415             :       real (r8)   ns                        ! final value to be returned
     416             : 
     417             :       parameter ( dim0 = 191._r8  )
     418             :       parameter ( dt0  = 1800._r8 )
     419             :       parameter ( ns0  = 4._r8    )
     420             :       !-----------------------------------------------------------------------
     421             : 
      422           0 :       pdt = int(dtime)   ! dtime is the large time step passed in as an argument
     423           0 :       dim = max ( im, 2*(jm-1) )
     424           0 :       ns  = int ( ns0*abs(pdt)*dim/(dt0*dim0) + 0.75_r8 )
     425           0 :       ns  = max ( 1._r8, ns )   ! for cases in which dt or dim is too small
     426             : 
     427           0 :       init_nsplit = ns
     428             : 
     429        1536 :    end function init_nsplit
     430             :    !---------------------------------------------------------------------------
     431             : 
     432             : end subroutine dyn_readnl
     433             : 
     434             : !=============================================================================================
     435             : 
     436        1536 : subroutine dyn_register()
     437             : 
     438             :    use physics_buffer,  only: pbuf_add_field, dtype_r8
     439             :    use ppgrid,          only: pcols, pver
     440             :    use phys_control,    only: use_gw_front, use_gw_front_igw
     441             :    use qbo,             only: qbo_use_forcing
     442             : 
     443             :    ! These fields are computed by the dycore and passed to the physics via the
     444             :    ! physics buffer.
     445             : 
     446        1536 :    if (use_gw_front .or. use_gw_front_igw) then
     447             :       call pbuf_add_field("FRONTGF", "global", dtype_r8, (/pcols,pver/), &
     448        1536 :          frontgf_idx)
     449             :       call pbuf_add_field("FRONTGA", "global", dtype_r8, (/pcols,pver/), &
     450        1536 :          frontga_idx)
     451             :    end if
     452             : 
     453        1536 :    if (qbo_use_forcing) then
     454             :       call pbuf_add_field("UZM", "global", dtype_r8, (/pcols,pver/), &
     455           0 :          uzm_idx)
     456             :    end if
     457             : 
     458        1536 : end subroutine dyn_register
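                      :
                      : ! Editor's note: on the physics side these fields are retrieved from the
                      : ! physics buffer by the saved indices, e.g. (a sketch; the pbuf pointer
                      : ! argument is assumed to be supplied by the physics driver):
                      : !
                      : !    real(r8), pointer :: frontgf(:,:)
                      : !    call pbuf_get_field(pbuf, frontgf_idx, frontgf)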
     459             : 
     460             : !=============================================================================================
     461             : 
     462        1536 : subroutine dyn_init(dyn_in, dyn_out)
     463             : 
     464             :    ! Initialize FV dynamical core state variables
     465             : 
     466        1536 :    use physconst,       only: omega, rearth, rair, cpair, zvir
     467             :    use air_composition, only: thermodynamic_active_species_idx
     468             :    use air_composition, only: thermodynamic_active_species_idx_dycore
     469             :    use air_composition, only: thermodynamic_active_species_liq_idx,thermodynamic_active_species_ice_idx
     470             :    use air_composition, only: thermodynamic_active_species_liq_idx_dycore,thermodynamic_active_species_ice_idx_dycore
     471             :    use air_composition, only: thermodynamic_active_species_liq_num, thermodynamic_active_species_ice_num
     472             :    use infnan,          only: inf, assignment(=)
     473             : 
     474             :    use constituents,    only: pcnst, cnst_name, cnst_longname, tottnam, cnst_get_ind
     475             :    use cam_history,     only: addfld, add_default, horiz_only
     476             :    use phys_control,    only: phys_getopts
     477             : 
     478             : #if ( defined OFFLINE_DYN )
     479             :    use metdata,         only: metdata_dyn_init
     480             : #endif
     481             :    use ctem,            only: ctem_init
     482             :    use diag_module,     only: fv_diag_init
     483             :    use dyn_tests_utils, only: vc_dycore, vc_moist_pressure, string_vc, vc_str_lgth
     484             : 
     485             :    ! arguments:
     486             :    type (dyn_import_t),     intent(out) :: dyn_in
     487             :    type (dyn_export_t),     intent(out) :: dyn_out
     488             : 
     489             :    ! Local variables
     490             :    type (t_fvdycore_state),     pointer :: dyn_state
     491             :    type (t_fvdycore_grid),      pointer :: grid
     492             :    type (t_fvdycore_constants), pointer :: constants
     493             : 
     494             :    real(r8) :: dt
     495             : 
     496             :    integer :: ifirstxy, ilastxy
     497             :    integer :: jfirstxy, jlastxy
     498             :    integer :: km
     499             :    integer :: ierr
     500             : 
     501             :    integer :: m, ixcldice, ixcldliq
     502             :    logical :: history_budget      ! output tendencies and state variables for budgets
     503             :    integer :: budget_hfile_num
     504             : 
     505             :    character(len=*), parameter :: sub='dyn_init'
     506             :    character (len=vc_str_lgth) :: vc_str
     507             :    !----------------------------------------------------------------------------
     508        1536 :    vc_dycore = vc_moist_pressure
     509        1536 :    if (masterproc) then
     510           2 :      call string_vc(vc_dycore,vc_str)
     511           2 :      write(iulog,*) sub//': vertical coordinate dycore   : ',trim(vc_str)
     512             :    end if
     513        1536 :    dyn_state => get_dyn_state()
     514        1536 :    grid      => dyn_state%grid
     515        1536 :    constants => dyn_state%constants
     516             : 
     517        1536 :    if (grid%high_alt) then
     518           0 :       grid%ntotq = grid%ntotq + 1 ! advect Kappa
     519           0 :       grid%kthi = grid%kthi + 1
     520           0 :       grid%kthia(:) = grid%kthia(:) + 1
     521             :    endif
     522             : 
     523             :    ! Set constants
     524        1536 :    constants%pi    = pi
     525        1536 :    constants%omega = omega
     526        1536 :    constants%ae    = rearth
     527        1536 :    constants%rair  = rair
     528        1536 :    constants%cp    = cpair
     529        1536 :    constants%cappa = rair/cpair
     530        1536 :    constants%zvir  = zvir
     531             : 
     532        1536 :    dt = get_step_size()
     533        1536 :    dyn_state%dt        = dt         ! Should this be part of state??
     534             : 
     535        1536 :    dyn_state%check_dt  = 21600.0_r8 ! Check max and min every 6 hours.
     536             : 
     537             :    ! Create the dynamics import and export state objects
     538        1536 :    ifirstxy = grid%ifirstxy
     539        1536 :    ilastxy  = grid%ilastxy
     540        1536 :    jfirstxy = grid%jfirstxy
     541        1536 :    jlastxy  = grid%jlastxy
     542        1536 :    km       = grid%km
     543             : 
     544             :    allocate(dyn_in%phis(  ifirstxy:ilastxy,jfirstxy:jlastxy),          &
     545             :             dyn_in%ps(    ifirstxy:ilastxy,jfirstxy:jlastxy),          &
     546             :             dyn_in%u3s(   ifirstxy:ilastxy,jfirstxy:jlastxy,km),       &
     547             :             dyn_in%v3s(   ifirstxy:ilastxy,jfirstxy:jlastxy,km),       &
     548             :             dyn_in%pe(    ifirstxy:ilastxy,km+1,jfirstxy:jlastxy),     &
     549             :             dyn_in%pt(    ifirstxy:ilastxy,jfirstxy:jlastxy,km),       &
     550             :             dyn_in%t3(    ifirstxy:ilastxy,jfirstxy:jlastxy,km),       &
     551             :             dyn_in%pk(    ifirstxy:ilastxy,jfirstxy:jlastxy,km+1),     &
     552             :             dyn_in%pkz(   ifirstxy:ilastxy,jfirstxy:jlastxy,km),       &
     553             :             dyn_in%delp(  ifirstxy:ilastxy,jfirstxy:jlastxy,km),       &
     554             :             dyn_in%tracer(ifirstxy:ilastxy,jfirstxy:jlastxy,km, grid%ntotq ), &
     555       58368 :             stat=ierr)
     556             : 
     557        1536 :    if ( ierr /= 0 ) then
     558           0 :       write(iulog,*) sub//': ERROR: allocating components of dyn_in.  ierr=', ierr
     559           0 :       call endrun(sub//': ERROR: allocating components of dyn_in')
     560             :    end if
     561             : 
     562        1536 :    dyn_in%phis   = inf
     563        1536 :    dyn_in%ps     = inf
     564        1536 :    dyn_in%u3s    = inf
     565        1536 :    dyn_in%v3s    = inf
     566        1536 :    dyn_in%pe     = inf
     567        1536 :    dyn_in%pt     = inf
     568        1536 :    dyn_in%t3     = inf
     569        1536 :    dyn_in%pk     = inf
     570        1536 :    dyn_in%pkz    = inf
     571        1536 :    dyn_in%delp   = inf
     572        1536 :    dyn_in%tracer = inf
     573             : 
     574             :    ! Export object has all of these except phis
     575        1536 :    dyn_out%phis   => dyn_in%phis
     576        1536 :    dyn_out%ps     => dyn_in%ps
     577        1536 :    dyn_out%u3s    => dyn_in%u3s
     578        1536 :    dyn_out%v3s    => dyn_in%v3s
     579        1536 :    dyn_out%pe     => dyn_in%pe
     580        1536 :    dyn_out%pt     => dyn_in%pt
     581        1536 :    dyn_out%t3     => dyn_in%t3
     582        1536 :    dyn_out%pk     => dyn_in%pk
     583        1536 :    dyn_out%pkz    => dyn_in%pkz
     584        1536 :    dyn_out%delp   => dyn_in%delp
     585        1536 :    dyn_out%tracer => dyn_in%tracer
     586             : 
     587             :    ! And several more which are not in the import container
     588             :    allocate(dyn_out%peln (ifirstxy:ilastxy,km+1,jfirstxy:jlastxy),&
     589             :             dyn_out%omga (ifirstxy:ilastxy,km,jfirstxy:jlastxy),  &
     590             :             dyn_out%mfx  (ifirstxy:ilastxy,jfirstxy:jlastxy,km),  &
     591             :             dyn_out%mfy  (ifirstxy:ilastxy,jfirstxy:jlastxy,km),  &
     592       24576 :             stat=ierr)
     593        1536 :    if ( ierr /= 0 ) then
     594           0 :       write(iulog,*) sub//': ERROR: allocating components of dyn_out.  ierr=', ierr
     595           0 :       call endrun(sub//': ERROR: allocating components of dyn_out')
     596             :    end if
     597             : 
     598        1536 :    if (dyn_state%am_fixer .or. dyn_state%am_diag) then
     599             : 
     600             :       allocate( &
     601             :          dyn_out%duf3s(ifirstxy:ilastxy,jfirstxy:jlastxy,km),  &
     602           0 :          stat=ierr)
     603           0 :       if ( ierr /= 0 ) then
     604           0 :          write(iulog,*) sub//': ERROR: allocating duf3s components of dyn_out.  ierr=', ierr
     605           0 :          call endrun(sub//': ERROR: allocating duf3s components of dyn_out')
     606             :       end if
     607           0 :       dyn_out%duf3s= inf
     608             :    end if
     609             : 
     610        1536 :    if (dyn_state%am_diag) then
     611             :       allocate( &
     612             :          dyn_out%du3s (ifirstxy:ilastxy,jfirstxy:jlastxy,km),  &
     613             :          dyn_out%dv3s (ifirstxy:ilastxy,jfirstxy:jlastxy,km),  &
     614             :          dyn_out%dua3s(ifirstxy:ilastxy,jfirstxy:jlastxy,km),  &
     615             :          dyn_out%dva3s(ifirstxy:ilastxy,jfirstxy:jlastxy,km),  &
     616           0 :          stat=ierr)
     617           0 :       if ( ierr /= 0 ) then
     618           0 :          write(iulog,*) sub//': ERROR: allocating du3s components of dyn_out.  ierr=', ierr
     619           0 :          call endrun(sub//': ERROR: allocating du3s components of dyn_out')
     620             :       end if
     621           0 :       dyn_out%du3s = inf
     622           0 :       dyn_out%dv3s = inf
     623           0 :       dyn_out%dua3s= inf
     624           0 :       dyn_out%dva3s= inf
     625             :    end if
     626             : 
     627        1536 :    dyn_out%peln = inf
     628        1536 :    dyn_out%omga = inf
     629        1536 :    dyn_out%mfx  = inf
     630        1536 :    dyn_out%mfy  = inf
     631             : 
     632             : #if ( defined OFFLINE_DYN )
     633             :    call metdata_dyn_init(grid)
     634             : #endif
     635             : 
     636             :    ! Setup circulation diagnostics
     637        1536 :    call ctem_init()
     638             : 
     639             :    ! Diagnostics for AM
     640        1536 :    if (dyn_state%am_diag) call fv_diag_init()
     641             : 
     642        1536 :    call set_phis(dyn_in)
     643             : 
     644        1536 :    if (initial_run) then
     645             : 
     646             :       ! Read in initial data
     647         768 :       call read_inidat(dyn_in)
     648         768 :       call clean_iodesc_list()
     649             : 
     650             :    end if
     651             : 
     652             :    ! History output
     653             : 
     654        3072 :    call addfld('US',    (/ 'lev' /),'A','m/s','Zonal wind, staggered', gridname='fv_u_stagger')
     655        3072 :    call addfld('VS',    (/ 'lev' /),'A','m/s','Meridional wind, staggered', gridname='fv_v_stagger')
     656        3072 :    call addfld('US&IC', (/ 'lev' /),'I','m/s','Zonal wind, staggered', gridname='fv_u_stagger')
     657        3072 :    call addfld('VS&IC', (/ 'lev' /),'I','m/s','Meridional wind, staggered', gridname='fv_v_stagger')
     658        1536 :    call addfld('PS&IC', horiz_only, 'I','Pa', 'Surface pressure', gridname='fv_centers')
     659        3072 :    call addfld('T&IC',  (/ 'lev' /),'I','K',  'Temperature', gridname='fv_centers')
     660      336384 :    do m = 1, pcnst
     661      671232 :       call addfld(trim(cnst_name(m))//'&IC',(/ 'lev' /),'I','kg/kg', cnst_longname(m), gridname='fv_centers')
     662             :    end do
     663      336384 :    do m = 1, pcnst
     664      669696 :       call addfld(tottnam(m),(/ 'lev' /),'A','kg/kg/s',trim(cnst_name(m))//' horz + vert + fixer tendency ',  &
     665     1340928 :                   gridname='fv_centers')
     666             :    end do
     667             : 
     668        1536 :    call add_default('US&IC   ', 0, 'I')
     669        1536 :    call add_default('VS&IC   ', 0, 'I')
     670        1536 :    call add_default('PS&IC      ',0, 'I')
     671        1536 :    call add_default('T&IC       ',0, 'I')
     672      336384 :    do m = 1, pcnst
     673      336384 :       call add_default(trim(cnst_name(m))//'&IC',0, 'I')
     674             :    end do
     675             : 
     676        3072 :    call addfld('DUH',      (/ 'lev' /), 'A','K/s','U horizontal diffusive heating', gridname='fv_centers')
     677        3072 :    call addfld('DVH',      (/ 'lev' /), 'A','K/s','V horizontal diffusive heating', gridname='fv_centers')
     678             :    call addfld('ENGYCORR', (/ 'lev' /), 'A','W/m2','Energy correction for over-all conservation', &
     679        3072 :                                                    gridname='fv_centers')
     680             : 
     681        3072 :    call addfld('FU',       (/ 'lev' /), 'A','m/s2','Zonal wind forcing term', gridname='fv_centers')
     682        3072 :    call addfld('FV',       (/ 'lev' /), 'A','m/s2','Meridional wind forcing term', gridname='fv_centers')
     683             :    call addfld('FU_S',     (/ 'lev' /), 'A','m/s2','Zonal wind forcing term on staggered grid', &
     684        3072 :                                                    gridname='fv_u_stagger')
     685             :    call addfld('FV_S',     (/ 'lev' /), 'A','m/s2','Meridional wind forcing term on staggered grid', &
     686        3072 :                                                    gridname='fv_v_stagger')
     687        3072 :    call addfld('TTEND',    (/ 'lev' /), 'A','K/s','Total T tendency (all processes)', gridname='fv_centers')
     688        1536 :    call addfld('LPSTEN',   horiz_only,  'A','Pa/s','Surface pressure tendency',       gridname='fv_centers')
     689        3072 :    call addfld('VAT',      (/ 'lev' /), 'A','K/s','Vertical advective tendency of T', gridname='fv_centers')
     690        3072 :    call addfld('KTOOP',    (/ 'lev' /), 'A','K/s','(Kappa*T)*(omega/P)',              gridname='fv_centers')
     691             : 
     692        1536 :    call phys_getopts(history_budget_out=history_budget, history_budget_histfile_num_out=budget_hfile_num)
     693        1536 :    if ( history_budget ) then
     694           0 :       call cnst_get_ind('CLDLIQ', ixcldliq)
     695           0 :       call cnst_get_ind('CLDICE', ixcldice)
     696           0 :       call add_default(tottnam(       1), budget_hfile_num, ' ')
     697           0 :       call add_default(tottnam(ixcldliq), budget_hfile_num, ' ')
     698           0 :       call add_default(tottnam(ixcldice), budget_hfile_num, ' ')
     699           0 :       call add_default('TTEND   '       , budget_hfile_num, ' ')
     700             :    end if
     701             : 
     702        9216 :    thermodynamic_active_species_idx_dycore(:) = thermodynamic_active_species_idx(:)
     703        4608 :    do m=1,thermodynamic_active_species_liq_num
     704        3072 :      thermodynamic_active_species_liq_idx_dycore(m) = thermodynamic_active_species_liq_idx(m)
     705        4608 :      if (masterproc) then
     706           4 :        write(iulog,*) sub//": m,thermodynamic_active_species_idx_liq_dycore: ",m,thermodynamic_active_species_liq_idx_dycore(m)
     707             :      end if
     708             :    end do
     709        4608 :    do m=1,thermodynamic_active_species_ice_num
     710        3072 :      thermodynamic_active_species_ice_idx_dycore(m) = thermodynamic_active_species_ice_idx(m)
     711        4608 :      if (masterproc) then
     712           4 :        write(iulog,*) sub//": m,thermodynamic_active_species_idx_ice_dycore: ",m,thermodynamic_active_species_ice_idx_dycore(m)
     713             :      end if
     714             :    end do
     715             : 
     716        3072 : end subroutine dyn_init
     717             : 
     718             : !=============================================================================================
     719             : 
     720       16128 : subroutine dyn_run(ptop, ndt, te0, dyn_state, dyn_in, dyn_out, rc)
     721             : 
     722             :    ! Driver for the NASA finite-volume dynamical core
     723             :    !
     724             :    ! Developer: Shian-Jiann Lin, NASA/GSFC; email: lin@dao.gsfc.nasa.gov
     725             :    !
      726             :    ! Top view of D-grid prognostic variables: u, v, and delp (and other scalars)
     727             :    !
     728             :    !               u(i,j+1)
     729             :    !                 |
     730             :    !      v(i,j)---delp(i,j)---v(i+1,j)
     731             :    !                 |
     732             :    !               u(i,j)
     733             :    !
     734             :    ! External routine required:
     735             :    !
      736             :    ! The user needs to supply a subroutine to set up the
      737             :    ! "Eulerian" vertical coordinate for remapping purposes.
      738             :    ! Currently this routine is named set_eta().
      739             :    ! In principle any terrain-following vertical
      740             :    ! coordinate can be used. The input to fvcore
      741             :    ! need not be on the same vertical coordinate
      742             :    ! as the output.
     743             :    ! If SPMD is defined the Pilgrim communication
     744             :    ! library developed by Will Sawyer will be needed.
     745             :    !
     746             :    ! Remarks:
     747             :    !
      748             :    ! Values at poles for both u and v need not be defined, but values for
      749             :    ! all other scalars need to be defined at both poles (as polar cap mean
     750             :    ! quantities). Tracer advection is done "off-line" using the
     751             :    ! large time step. Consistency is maintained by using the time accumulated
     752             :    ! Courant numbers and horizontal mass fluxes for the FFSL algorithm.
     753             :    ! The input "pt" can be either dry potential temperature
     754             :    ! defined as T/pkz (adiabatic case) or virtual potential temperature
      755             :    ! defined as T*/pkz (full phys case). If convt is true, pt is not updated;
      756             :    ! instead, virtual temperature is output.
      757             :    ! pt is updated if convt is false.
      758             :    ! The user may set the value of nx to optimize the SMP performance.
      759             :    ! The optimal value of nx depends on the total number of available
      760             :    ! shared-memory CPUs per node (NS). Assuming the maximum MPI
      761             :    ! decomposition is used in the y-direction, set nx=1 if
      762             :    ! NS <= 4; nx=4 if NS = 16.
     763             :    !
     764             :    ! A 2D xy decomposition is used for handling the Lagrangian surface
     765             :    ! remapping, the ideal physics, and (optionally) the geopotential
     766             :    ! calculation.
     767             :    !
     768             :    ! The transpose from yz to xy decomposition takes place within dynpkg.
     769             :    ! The xy decomposed variables are then transposed directly to the
     770             :    ! physics decomposition within d_p_coupling.
     771             :    !
     772             :    ! The xy decomposed variables have names corresponding to the
     773             :    ! yz decomposed variables: simply append "xy". Thus, "uxy" is the
     774             :    ! xy decomposed version of "u".
     775             :    !
     776             :    ! This version supports overlap of trac2d and cd_core subcycles (Art Mirin, November 2007).
     777             :    ! This refers to the subcycles described by the "do n=1,n2" loop and has nothing to
     778             :    ! do with the "do it=1,nsplit" lower-level subcycling. Each trac2d call (n), other than the last,
     779             :    ! is overlapped with the subsequent cd_core 'series' (n+1). The controlling namelist variable
     780             :    ! is ct_overlap. The overlapping trac2d calls are carried out on the second set of
     781             :    ! npes_yz processes (npes_yz <= iam < 2*npes_yz). The tracer arrays are sent to the
     782             :    ! auxiliary processes prior to the do n=1,n2 loop. During each subcycle (other than the last),
     783             :    ! the dp0 array is sent prior to the cd_core series; arrays cx, cy, mfx, mfy are sent directly
     784             :    ! from cd_core during the last call in the series (it=nsplit). At the completion of the last
     785             :    ! auxiliary trac2d subcycle (n=n2-1), the updated tracer values are returned to the
     786             :    ! primary processes; the last tracer subcycle (n=n2) is carried out on the primary processes.
      787             :    ! Communication calls are nonblocking, with an attempt to overlap computation to the extent
     788             :    ! possible. The CCSM mpi layer (wrap_mpi) is used. Tags with values greater than npes_xy
     789             :    ! are chosen to avoid possible interference between the messages sent from cd_core and
     790             :    ! the geopk-related transpose messages called from cd_core thereafter. The auxiliary
     791             :    ! processes must use values of jfirst, jlast, kfirst, klast corresponding to their primary
     792             :    ! process antecedents, whereas by design those values are (1,0,1,0), resp. (set in spmdinit_dyn).
     793             :    ! We therefore add auxiliary subdomain limits to the grid datatype: jfirstct, jlastct,
     794             :    ! kfirstct, klastct. For the primary processes, these are identical to the actual subdomain
     795             :    ! limits; for the secondary processes, these correspond to the subdomain limits of the
     796             :    ! antecedent primary process. These values are communicated to the auxiliary processes
     797             :    ! during initialization (spmd_vars_init). During the auxiliary calculations (and allocations)
     798             :    ! we temporarily set jfirst equal to jfirstct (etc.) and when done, restore to the original
     799             :    ! values. Other information needed by the auxiliary processes is obtained through the grid
     800             :    ! datatype.
     801             :    !
     802             :    ! This version supports tracer decomposition with trac2d (Art Mirin, January 2008).
     803             :    ! This option is mutually exclusive with ct_overlap. Variable "trac_decomp" is the size of the
     804             :    ! decomposition. The tracers are divided into trac_decomp groups, and the kth group is solved
     805             :    ! on the kth set of npes_yz processes. Much of the methodology is similar to that for ct_overlap.
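                      :    !
                      :    ! Editor's sketch of the ct_overlap control flow described above
                      :    ! (schematic only; the actual logic lives in dynpkg):
                      :    !
                      :    !    do n = 1, n2
                      :    !       do it = 1, nsplit
                      :    !          call cd_core(...)  ! it=nsplit sends cx, cy, mfx, mfy to aux PEs
                      :    !       end do
                      :    !       ! n < n2: trac2d runs on the auxiliary processes, overlapped
                      :    !       ! with the next cd_core series; n = n2: trac2d runs on the
                      :    !       ! primary processes
                      :    !    end do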
     806             :    !
     807             :    ! REVISION HISTORY:
     808             :    !   SJL 99.04.13:  Initial SMP version delivered to Will Sawyer
     809             :    !   WS  99.10.03:  1D MPI completed and tested;
     810             :    !   WS  99.10.11:  Additional documentation
     811             :    !   WS  99.10.28:  benergy and te_map added; arrays pruned
     812             :    !   SJL 00.01.01:  SMP and MPI enhancements; documentation
     813             :    !   WS  00.07.13:  Changed PILGRIM API
     814             :    !   WS  00.08.28:  SPMD instead of MPI_ON
     815             :    !   AAM 00.08.10:  Add kfirst:klast
     816             :    !   WS  00.12.19:  phis now distr., LLNL2DModule initialized here
     817             :    !   WS  01.02.02:  bug fix: parsplit only called for FIRST time
     818             :    !   WS  01.04.09:  Added initialization of ghost regions
     819             :    !   WS  01.06.10:  Removed if(first) section; use module
     820             :    !   AAM 01.06.27:  Extract te_map call into separate routine
     821             :    !   AAM 01.07.13:  Get rid of dynpkg2; recombine te_map;
     822             :    !                  perform forward transposes for 2D decomposition
     823             :    !   WS  01.12.10:  Ghosted PT (changes benergy, cd_core, te_map, hswf)
     824             :    !   WS  03.08.05:  removed vars dcaf, rayf, ideal, call to hswf
     825             :    !                  (idealized physics is now in physics package)
     826             :    !   WS  03.08.13:  Removed ghost region from UXY
     827             :    !   WS  05.06.11:  Inserted into FVCAM_GridCompMod
     828             :    !   WS  06.03.03:  Added dyn_state as argument (for reentrancy)
     829             :    !   WS  06.06.28:  Using new version of benergy
     830             :    !   TT  16.12.11:  AM conservation options
      831             :    !   TT  17.01.30:  dynamic wind increments diagnostic
     832             :    !-----------------------------------------------------------------------
     833             : 
     834             : 
     835        1536 :    use diag_module, only   : compute_vdot_gradp
     836             : 
     837             : #if defined( SPMD )
     838             :    use mpishorthand,   only: mpir8
     839             :    use mod_comm,       only: mp_sendirr, mp_recvirr, mp_send4d_ns, &
     840             :                              mp_send3d, mp_recv3d, &
     841             :                              mp_recv4d_ns, mp_sendtrirr, mp_recvtrirr
     842             : #endif
     843             : #if ( defined OFFLINE_DYN )
     844             :    use metdata, only: get_met_fields, advance_met, get_us_vs, met_rlx
     845             :    use pfixer,  only: adjust_press
     846             : #endif
     847             :    use metdata, only: met_fix_mass
     848             : 
     849             :    use shr_reprosum_mod, only: shr_reprosum_calc
     850             :    use cam_thermo,       only: cam_thermo_calc_kappav
     851             : 
     852             : #if defined( SPMD )
     853             : #include "mpif.h"
     854             : #endif
     855             : 
     856             :    ! arguments
     857             :    real(r8),            intent(in)    :: ptop      ! Pressure at model top (interface pres)
     858             :    integer,             intent(in)    :: ndt       ! the large time step in seconds
     859             :                                                    ! Also the mapping time step in this setup
     860             : 
     861             :    real(r8),            intent(out)   :: te0       ! Total energy before dynamics
     862             :    type (T_FVDYCORE_STATE), target    :: dyn_state ! Internal state
     863             :    type (dyn_import_t), intent(in)    :: dyn_in    ! Import container
     864             :    type (dyn_export_t), intent(inout) :: dyn_out   ! Export container
     865             : 
     866             :    integer,             intent(out)   :: rc        ! Return code
     867             : 
     868             :    integer, parameter  ::  DYN_RUN_SUCCESS           = 0
     869             :    integer, parameter  ::  DYN_RUN_FAILURE           = -1
     870             :    integer, parameter  ::  DYN_RUN_R4_NOT_SUPPORTED  = -10
     871             :    integer, parameter  ::  DYN_RUN_MUST_BE_2D_DECOMP = -20
     872             :    real(r8), parameter ::  D1_0                      = 1.0_r8
     873             : 
     874             :    ! Variables from the dynamics interface (import or export)
     875             : 
     876       16128 :    real(r8), pointer :: phisxy(:,:)   ! surface geopotential (grav*zs)
     877       16128 :    real(r8), pointer :: psxy(:,:)     ! Surface pressure (pa)
     878       16128 :    real(r8), pointer :: t3xy(:,:,:)   ! temperature (K)
     879       16128 :    real(r8), pointer :: ptxy(:,:,:)   ! scaled (virtual) potential temperature
     880       16128 :    real(r8), pointer :: delpxy(:,:,:) ! Pressure thickness
     881       16128 :    real(r8), pointer :: tracer(:,:,:,:) ! Tracers
     882       16128 :    real(r8), pointer :: uxy(:,:,:)    ! u wind velocities, staggered grid
     883       16128 :    real(r8), pointer :: vxy(:,:,:)    ! v wind velocities, staggered grid
     884             : 
     885             :    !--------------------------------------------------------------------------------------
     886             :    ! The arrays pexy, pkxy, pkzxy must be pre-computed as input to benergy().
     887             :    ! They are NOT needed if dyn_state%consv=.false.; they are updated on output
     888             :    ! (to be used by physdrv). Please refer to routine pkez for the algorithm
     889             :    ! used to compute pkz from pe and pk.
     890             :    !--------------------------------------------------------------------------------------
     891             : 
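   For orientation on the pkz referred to here: pkz is the layer mean of pk = pe**cappa
   that keeps the pressure-gradient and energy terms consistent. A schematic sketch of the
   relation computed by pkez (peln being the log of the edge pressures), written here only
   as a reading aid:

      ! pkz(i,j,k) = (pk(i,j,k+1) - pk(i,j,k))
      !              / (cappa * (peln(i,k+1,j) - peln(i,k,j)))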
     892       16128 :    real(r8), pointer :: pexy(:,:,:)   ! Pres at layer edges
     893       16128 :    real(r8), pointer :: pkxy(:,:,:)   ! pe**cappa
     894       16128 :    real(r8), pointer :: pkzxy(:,:,:)  ! finite-volume mean of pk
     895             : 
     896             :    ! Export state only variables
     897       16128 :    real(r8), pointer :: pelnxy(:,:,:)   ! Natural logarithm of pe
     898       16128 :    real(r8), pointer :: omgaxy(:,:,:)   ! vertical pressure velocity (pa/sec)
     899       16128 :    real(r8), pointer :: mfxxy(:,:,:)    ! mass flux in X (Pa m^2 / s)
     900       16128 :    real(r8), pointer :: mfyxy(:,:,:)    ! mass flux in Y (Pa m^2 / s)
     901       16128 :    real(r8), pointer :: duxy(:,:,:)     ! u tot. tend. from dycore, staggered grid
     902       16128 :    real(r8), pointer :: dvxy(:,:,:)     ! v tot. tend. from dycore, staggered grid
     903       16128 :    real(r8), pointer :: ucxy(:,:,:)     ! u tend. from advection only, staggd grid
     904       16128 :    real(r8), pointer :: vcxy(:,:,:)     ! v tend. from advection only, staggd grid
     905       16128 :    real(r8), pointer :: dufix_xy(:,:,:) ! u tend. from AM fixer, staggered grid
     906             : 
     907             :    ! Other pointers (for convenience)
     908             :    type (T_FVDYCORE_GRID)      , pointer :: GRID      ! For convenience
     909             :    type (T_FVDYCORE_CONSTANTS) , pointer :: CONSTANTS ! For convenience
     910             : 
     911             :    !  YZ variables currently allocated on stack... should they be on the heap?
     912             : 
     913       32256 :    real(r8) :: ps(dyn_state%grid%im,dyn_state%grid%jfirst:dyn_state%grid%jlast)
     914       32256 :    real(r8) :: phis(dyn_state%grid%im,dyn_state%grid%jfirst:dyn_state%grid%jlast)
     915             :    real(r8) :: pe(dyn_state%grid%im,  &
     916             :                   dyn_state%grid%kfirst:dyn_state%grid%klast+1,&
     917       32256 :                   dyn_state%grid%jfirst:dyn_state%grid%jlast)
     918             :    real(r8) :: delp(dyn_state%grid%im,dyn_state%grid%jfirst:dyn_state%grid%jlast,&
     919       32256 :                     dyn_state%grid%kfirst:dyn_state%grid%klast)
     920             :    real(r8) :: pk(dyn_state%grid%im,dyn_state%grid%jfirst:dyn_state%grid%jlast,&
     921       32256 :                   dyn_state%grid%kfirst:dyn_state%grid%klast+1)
     922             :    real(r8) :: pkz(dyn_state%grid%im,dyn_state%grid%jfirst:dyn_state%grid%jlast, &
     923       32256 :                    dyn_state%grid%kfirst:dyn_state%grid%klast)
     924             :    real(r8) :: u(dyn_state%grid%im,   &
     925             :                  dyn_state%grid%jfirst-dyn_state%grid%ng_d:dyn_state%grid%jlast+dyn_state%grid%ng_s,&
     926       32256 :                  dyn_state%grid%kfirst:dyn_state%grid%klast)
     927             :    real(r8) :: v(dyn_state%grid%im,   &
     928             :                  dyn_state%grid%jfirst-dyn_state%grid%ng_s:dyn_state%grid%jlast+dyn_state%grid%ng_d,&
     929       32256 :                  dyn_state%grid%kfirst:dyn_state%grid%klast)
     930             :    real(r8) :: pt(dyn_state%grid%im,  &
     931             :                   dyn_state%grid%jfirst-dyn_state%grid%ng_d:dyn_state%grid%jlast+dyn_state%grid%ng_d,&
     932       32256 :                   dyn_state%grid%kfirst:dyn_state%grid%klast)
     933             : 
     934             :    real(r8) :: pi
     935             :    real(r8) :: om       ! angular velocity of earth's rotation
     936             :    real(r8) :: cp       ! heat capacity of air at constant pressure
     937             :    real(r8) :: ae       ! radius of the earth (m)
     938             : 
     939             :    real(r8) :: rair     ! Gas constant of the air
     940             :    real(r8) :: cappa    ! R/Cp
     941             :    real(r8) :: zvir     ! Virtual effect constant ( = rwv/rair-1 )
     942             : 
     943             :    logical :: consv     ! Energy conserved?
     944             : 
     945             :    integer :: im        ! dimension in east-west
     946             :    integer :: jm        ! dimension in North-South
     947             :    integer :: km        ! number of Lagrangian layers
     948             :    integer :: jfirst    ! starting latitude index for MPI
     949             :    integer :: jlast     ! ending latitude index for MPI
     950             :    integer :: kfirst    ! starting vertical index for MPI
     951             :    integer :: klast     ! ending vertical index for MPI
     952             :    integer :: ntotq     ! total # of tracers to be advected
     953             :    integer :: iord      ! parameter controlling monotonicity in E-W
     954             :                                    ! recommendation: iord=4
     955             :    integer :: jord      ! parameter controlling monotonicity in N-S
     956             :                                    ! recommendation: jord=4
     957             :    integer :: kord      ! parameter controlling monotonicity in mapping
     958             :                                    ! recommendation: kord=4
     959             :    integer :: icd       ! X algorithm order on C-grid
     960             :    integer :: jcd       ! Y algorithm order on C-grid
     961             :    integer :: ng_d      ! Ghosting width on D-grid
     962             :    integer :: ng_s      ! Ghosting width (staggered, for winds)
     963             :    integer :: ns        ! overall split
     964             :    integer :: div24del2flag
     965             :    real(r8):: del2coef
     966             : 
     967             :    integer :: ifirstxy, ilastxy, jfirstxy, jlastxy  ! xy decomposition
     968             :    integer :: npr_z
     969             : 
     970             :    logical :: cd_penul
     971             : 
     972       16128 :    real(r8), allocatable, target    :: q_internal(:,:,:,:)    ! Pointers to tracers
     973             :    integer i, j, k, iq          ! Loop indices
     974             :    real(r8) umax                ! Maximum winds, m/s
     975             :    parameter (umax = 300.0_r8)
     976             : 
     977             :    integer    nx          ! # of split pieces in x-direction; for performance, the
     978             : #if defined( UNICOSMP )
     979             :    parameter (nx = 1)
     980             : #else
     981             :    parameter (nx = 4)     ! user may set nx=1 if there is NO shared memory multitasking
     982             : #endif
     983             :    integer :: ipe, it, iv
     984             :    integer :: nsplit, n, n2, nv
     985             :    integer :: mq
     986             : 
     987             :    ! Move the following 3D arrays to an initialization routine?
     988       16128 :    real(r8), allocatable :: worka(:,:,:),workb(:,:,:),dp0(:,:,:),cx(:,:,:),cy(:,:,:)
     989       16128 :    real(r8), allocatable :: mfx(:,:,:), mfy(:,:,:)
     990       16128 :    real(r8), allocatable :: delpf(:,:,:), uc(:,:,:), vc(:,:,:)
     991       16128 :    real(r8), allocatable :: dwz(:,:,:), pkc(:,:,:), wz(:,:,:)
     992       16128 :    real(r8), allocatable :: dpt(:,:,:)
     993       16128 :    real(r8), allocatable :: pkcc(:,:,:), wzc(:,:,:)
     994             : 
     995             :    ! The following variables are work arrays for xy=>yz transpose
     996       16128 :    real(r8), allocatable :: pkkp(:,:,:), wzkp(:,:,:)
     997             : 
     998             :    ! The following variables are xy instantiations
     999       16128 :    real(r8), allocatable :: tempxy(:,:,:), dp0xy(:,:,:), wzxy(:,:,:)
    1000             : 
    1001             :    ! psxy3 is a dummy 3D variant of psxy
    1002       16128 :    real(r8), allocatable :: psxy3(:,:,:)
    1003             : 
    1004             :    ! phisxy3 is a dummy 3D variant of phisxy
    1005       16128 :    real(r8), allocatable :: phisxy3(:,:,:)
    1006       16128 :    real(r8), pointer     :: q3xypt(:,:,:)
    1007       16128 :    real(r8), pointer     :: q3yzpt(:,:,:)
    1008       32256 :    real(r8)              :: tte(dyn_state%grid%jm)
    1009       32256 :    real(r8)              :: XXX(dyn_state%grid%km)
    1010             : 
    1011             : #if ( defined OFFLINE_DYN )
    1012             :    real(r8), allocatable :: ps_obs(:,:)
    1013             :    real(r8), allocatable :: ps_mod(:,:)
    1014             :    real(r8), allocatable :: u_tmp(:,:,:)
    1015             :    real(r8), allocatable :: v_tmp(:,:,:)
    1016             : #endif
    1017             : 
    1018             :    logical :: fill
    1019             : 
    1020             :    real(r8) :: dt
    1021             :    real(r8) :: bdt
    1022             :    integer  :: filtcw
    1023             :    integer  :: ct_overlap
    1024             :    integer  :: trac_decomp
    1025             : 
    1026             :    integer modc_tracers, mlast
    1027             : 
    1028             :    ! cd_core / trac2d overlap and tracer decomposition data (AAM)
    1029             :    integer :: commnyz                                         ! n*npes_yz communicator
    1030             :    integer :: jfirstct, jlastct, kfirstct, klastct            ! primary subdomain limits
    1031             :    integer :: jkstore(4)                                      ! storage for subdomain limits
    1032             :    integer :: iamlocal                                        ! task number (global indexing)
    1033       32256 :    integer :: iremotea(dyn_state%grid%trac_decomp)            ! source/target; id array
    1034             :    integer :: iremote                                         ! source/target; working id
    1035             :    integer :: ndp0, ncx, ncy, nmfx, nmfy, ntrac               ! message sizes
    1036             :    integer :: dp0tag, cxtag, cytag, mfxtag, mfytag, tractag   ! message tags
    1037       32256 :    integer :: cxtaga(dyn_state%grid%trac_decomp)              ! tag arrays for cd_core
    1038       32256 :    integer :: cytaga(dyn_state%grid%trac_decomp)              ! tag arrays for cd_core
    1039       32256 :    integer :: mfxtaga(dyn_state%grid%trac_decomp)             ! tag arrays for cd_core
    1040       32256 :    integer :: mfytaga(dyn_state%grid%trac_decomp)             ! tag arrays for cd_core
    1041             :    logical :: ct_aux                                          ! true if auxiliary process
    1042             :    logical :: s_trac                                          ! true for cd_core posting tracer-related sends
    1043       16128 :    integer, allocatable :: ctreq(:,:)                         ! used for nonblocking receive
    1044       16128 :    integer, allocatable :: ctstat(:,:,:)                      ! used for nonblocking receive
    1045       16128 :    integer, allocatable :: ctreqs(:,:)                        ! used for nonblocking send
    1046       16128 :    integer, allocatable :: ctstats(:,:,:)                     ! used for nonblocking send
    1047       16128 :    integer, allocatable :: cdcreqs(:,:)                       ! used for nonblocking send in cd_core
    1048       16128 :    integer, pointer :: ktloa(:)                               ! lower limit of tracer decomposition (global)
    1049       16128 :    integer, pointer :: kthia(:)                               ! upper limit of tracer decomposition (global)
    1050             :    integer ktlo                                               ! lower limit of tracer decomposition (local)
    1051             :    integer kthi                                               ! upper limit of tracer decomposition (local)
    1052             :    integer kt, tagu, naux, kaux, ntg0
    1053             : 
    1054             :    logical :: print_subcycling = .true.
    1055             :    logical :: c_dotrac, t_dotrac
    1056             :    logical :: convt_local
    1057             : 
    1058             :    data fill  /.true./              ! perform a simple filling algorithm
    1059             :                                     ! in case negatives were found
    1060             : 
    1061             :    ! C.-C. Chen, omega calculation
    1062             :    real(r8) :: cx_om(dyn_state%grid%im,dyn_state%grid%jfirst:dyn_state%grid%jlast, &
    1063       32256 :                      dyn_state%grid%kfirst:dyn_state%grid%klast)        ! Courant no. in X
    1064             :    real(r8) :: cy_om(dyn_state%grid%im,dyn_state%grid%jfirst:dyn_state%grid%jlast+1, &
    1065       32256 :                      dyn_state%grid%kfirst:dyn_state%grid%klast)        ! Courant no. in Y
    1066             :    real(r8) :: pexy_om(dyn_state%grid%ifirstxy:dyn_state%grid%ilastxy,dyn_state%grid%km+1, &
    1067       32256 :                        dyn_state%grid%jfirstxy:dyn_state%grid%jlastxy)
    1068             : 
    1069             :    ! Non-constant air properties for high top models (waccmx).
    1070             :    real(r8) :: cap3vi(dyn_state%grid%ifirstxy:dyn_state%grid%ilastxy,&
    1071       32256 :                       dyn_state%grid%jfirstxy:dyn_state%grid%jlastxy,dyn_state%grid%km+1)
    1072             :    real(r8) :: cp3vc (dyn_state%grid%im,dyn_state%grid%jfirst:dyn_state%grid%jlast,&
    1073       32256 :                       dyn_state%grid%kfirst:dyn_state%grid%klast)         !C_p on yz
    1074             :    real(r8) :: cap3vc(dyn_state%grid%im,dyn_state%grid%jfirst:dyn_state%grid%jlast,&
    1075       32256 :                       dyn_state%grid%kfirst:dyn_state%grid%klast)        !cappa on yz
    1076             : 
    1077             :    real(r8), dimension(dyn_state%grid%ifirstxy:dyn_state%grid%ilastxy,&
    1078             :                        dyn_state%grid%jfirstxy:dyn_state%grid%jlastxy,dyn_state%grid%km) :: &
    1079       32256 :              cp3v,cap3v
    1080             :    logical :: high_alt
    1081             : 
    1082             :    ! angular momentum (AM) conservation
    1083             :    logical  :: am_correction         ! apply AM correction?
    1084             :    logical  :: am_geom_crrct         ! apply AM geom. corr?
    1085             :    logical  :: am_fixer              ! apply AM fixer?
    1086             :    logical  :: am_fix_lbl            ! apply fixer separately on each shallow-water layer?
    1087             :    logical  :: am_fix_taper=.false.  ! default: no tapering; enabled if the global fixer is applied or high_order_top=.false.
    1088             :    real(r8) :: tmpsum(1,2)
    1089             :    real(r8) :: tmpresult(2)
    1090             :    real(r8) :: am0, am1, me0
    1091             : 
    1092       32256 :    real(r8) :: don(dyn_state%grid%jm,dyn_state%grid%km), & ! out of cd_core
    1093       32256 :                dod(dyn_state%grid%jm,dyn_state%grid%km)    ! out of cd_core
    1094       32256 :    real(r8) :: dons(dyn_state%grid%km), &                  ! sums over j
    1095       32256 :                dods(dyn_state%grid%km)
    1096             : 
    1097       16128 :    real(r8), allocatable :: zpkck(:,:)
    1098       32256 :    real(r8) :: avgpk(dyn_state%grid%km)
    1099       32256 :    real(r8) :: taper(dyn_state%grid%km)
    1100             :    real(r8) :: ptapk, xdlt2
    1101             :    real(r8), parameter :: ptap =9000._r8
    1102             :    real(r8), parameter :: dptap=1000._r8
    1103             :    real(r8), parameter :: tiny=.1e-10_r8
    1104             : 
    1105             :    ! AM diagnostics
    1106             :    logical  :: am_diag                  ! enable angular momentum diagnostic output
    1107             :    logical  :: am_fix_out
    1108             :    integer  :: kmtp                     ! range of levels (1:kmtp) where order is reduced
    1109             :    real(r8) :: ame(dyn_state%grid%jm)
    1110             :    real(r8) :: zpe(dyn_state%grid%jfirstxy:dyn_state%grid%jlastxy)
    1111             :    real(r8) :: tmp
    1112             :    real(r8) :: du_fix_g
    1113       32256 :    real(r8) :: du_fix(dyn_state%grid%km)
    1114       32256 :    real(r8) :: du_fix_s(dyn_state%grid%km)
    1115       16128 :    real(r8), allocatable :: du_fix_i(:,:,:)
    1116       16128 :    real(r8), allocatable :: du_k    (:,:)
    1117       16128 :    real(r8), allocatable :: du_north(:,:)
    1118       16128 :    real(r8), allocatable :: uc_s(:,:,:),vc_s(:,:,:)  ! workspace (accumulated uc,vc)
    1119       16128 :    real(r8), allocatable :: uc_i(:,:,:),vc_i(:,:,:)  ! workspace (transposed uc_s,vc_s)
    1120             : 
    1121             : 
    1122             :    ! NOTE -- model behaviour with high_order_top=true is still under validation and may require
    1123             :    !         some other form of enhanced damping in the top layer
    1124             :    logical :: high_order_top
    1125             : 
    1126             :    !--------------------------------------------------------------------------------------
    1127       16128 :    kmtp=dyn_state%grid%km/8
    1128             : 
    1129       16128 :    rc       =  DYN_RUN_FAILURE      ! Set initially to fail
    1130             : 
    1131       16128 :    phisxy   => dyn_in%phis
    1132       16128 :    psxy     => dyn_in%ps
    1133       16128 :    uxy      => dyn_in%u3s
    1134       16128 :    vxy      => dyn_in%v3s
    1135       16128 :    t3xy     => dyn_in%t3
    1136       16128 :    ptxy     => dyn_in%pt
    1137       16128 :    delpxy   => dyn_in%delp
    1138       16128 :    tracer   => dyn_in%tracer
    1139       16128 :    pexy     => dyn_in%pe
    1140       16128 :    pkxy     => dyn_in%pk
    1141       16128 :    pkzxy    => dyn_in%pkz
    1142             : 
    1143       16128 :    pelnxy   => dyn_out%peln
    1144       16128 :    omgaxy   => dyn_out%omga
    1145       16128 :    mfxxy    => dyn_out%mfx
    1146       16128 :    mfyxy    => dyn_out%mfy
    1147       16128 :    duxy     => dyn_out%du3s
    1148       16128 :    dvxy     => dyn_out%dv3s
    1149       16128 :    ucxy     => dyn_out%dua3s
    1150       16128 :    vcxy     => dyn_out%dva3s
    1151       16128 :    dufix_xy => dyn_out%duf3s
    1152             : 
    1153       16128 :    grid => dyn_state%grid    ! For convenience
    1154       16128 :    constants => DYN_STATE%CONSTANTS
    1155             : 
    1156       16128 :    ns   = dyn_state%nsplit   ! large split (will be subdivided later)
    1157       16128 :    n2   = dyn_state%nspltrac ! tracer split (will be subdivided later)
    1158       16128 :    nv   = dyn_state%nspltvrm ! vertical re-mapping split
    1159       16128 :    icd  = dyn_state%icd
    1160       16128 :    jcd  = dyn_state%jcd
    1161       16128 :    iord = dyn_state%iord
    1162       16128 :    jord = dyn_state%jord
    1163       16128 :    kord = dyn_state%kord
    1164       16128 :    div24del2flag = dyn_state%div24del2flag
    1165       16128 :    del2coef      = dyn_state%del2coef
    1166       16128 :    filtcw        = dyn_state%filtcw
    1167       16128 :    high_alt      = grid%high_alt
    1168             : 
    1169       16128 :    consv      = dyn_state%consv
    1170       16128 :    high_order_top= dyn_state%high_order_top
    1171       16128 :    am_correction = dyn_state%am_correction
    1172       16128 :    am_geom_crrct = dyn_state%am_geom_crrct
    1173       16128 :    am_fixer   = dyn_state%am_fixer
    1174       16128 :    am_fix_lbl = dyn_state%am_fix_lbl
    1175       16128 :    am_diag    = dyn_state%am_diag
    1176             : 
    1177       16128 :    pi   =  constants%pi
    1178       16128 :    om   =  constants%omega
    1179       16128 :    ae   =  constants%ae
    1180       16128 :    rair =  constants%rair
    1181       16128 :    cp   =  constants%cp
    1182       16128 :    cappa=  constants%cappa
    1183       16128 :    zvir =  constants%zvir
    1184             : 
    1185       16128 :    im = grid%im
    1186       16128 :    jm = grid%jm
    1187       16128 :    km = grid%km
    1188             : 
    1189       16128 :    ng_d  = grid%ng_d
    1190       16128 :    ng_s  = grid%ng_s
    1191             : 
    1192       16128 :    ifirstxy = grid%ifirstxy
    1193       16128 :    ilastxy  = grid%ilastxy
    1194       16128 :    jfirstxy = grid%jfirstxy
    1195       16128 :    jlastxy  = grid%jlastxy
    1196             : 
    1197       16128 :    jfirst   = grid%jfirst
    1198       16128 :    jlast    = grid%jlast
    1199       16128 :    kfirst   = grid%kfirst
    1200       16128 :    klast    = grid%klast
    1201             : 
    1202       16128 :    ntotq    = grid%ntotq
    1203       16128 :    modc_tracers = grid%modc_tracers
    1204             : 
    1205       16128 :    npr_z    = grid%npr_z
    1206             : 
    1207             :    ! cd_core/trac2d overlap and tracer decomposition
    1208       16128 :    ct_overlap  = grid%ct_overlap
    1209       16128 :    trac_decomp = grid%trac_decomp
    1210       16128 :    jfirstct    = grid%jfirstct
    1211       16128 :    jlastct     = grid%jlastct
    1212       16128 :    kfirstct    = grid%kfirstct
    1213       16128 :    klastct     = grid%klastct
    1214       16128 :    commnyz     = grid%commnyz
    1215       16128 :    iamlocal    = grid%iam
    1216             : 
    1217             :    ! kaux is an index describing the set of npes_yz processes; 0 for first set, 1 for second set, etc.
    1218       16128 :    kaux = iamlocal/grid%npes_yz
    1219             : 
    1220             :    ! ct_aux is true if current process is auxiliary, false otherwise
    1221             :    ct_aux = ((ct_overlap .gt. 0 .and. kaux .eq. 1) .or.      &
    1222       16128 :              (trac_decomp .gt. 1 .and. kaux .ge. 1 .and. kaux .lt. trac_decomp))
    1223             : 
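   A worked example of this indexing, with hypothetical values (not from any actual run):

      ! grid%npes_yz = 16, trac_decomp = 3, ct_overlap = 0:
      !   tasks  0..15 -> kaux = 0  (primary set)
      !   tasks 16..31 -> kaux = 1  (auxiliary, solves tracer group 2)
      !   tasks 32..47 -> kaux = 2  (auxiliary, solves tracer group 3)
      ! e.g. iamlocal = 37 gives kaux = 37/16 = 2 and ct_aux = .true.
      ! (with ct_overlap > 0 instead, only kaux == 1 marks an auxiliary process)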
    1224             :    ! define message tags to exceed npes_xy so as not to interfere with geopotential transpose tags
    1225             :    ! tags below correspond to communicated variables with ct_overlap and trac_decomp
    1226       16128 :    dp0tag  = grid%npes_xy + 5
    1227       16128 :    cxtag   = dp0tag + 1
    1228       16128 :    cytag   = dp0tag + 2
    1229       16128 :    mfxtag  = dp0tag + 3
    1230       16128 :    mfytag  = dp0tag + 4
    1231       16128 :    tractag = dp0tag + 5
    1232             : 
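   The per-group tag arrays assigned later (cxtaga etc.) shift these base tags by
   (kt-1)*(ntotq+ntg0), so each group owns a disjoint tag block. With hypothetical sizes,
   for illustration only:

      ! npes_xy = 64, ntotq = 30, ntg0 = 10:
      !   dp0tag = 69, cxtag = 70, cytag = 71, mfxtag = 72, mfytag = 73, tractag = 74
      !   group kt = 2: cxtaga(2) = cxtag + (2-1)*(30+10) = 110
      ! so group 2's tags start past every tag that group 1 can use.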
    1233             :    ! ntg0 is upper bound on number of needed tags beyond tracer tags for ct_overlap and trac_decomp
    1234       16128 :    ntg0 = 10
    1235             : 
    1236             :    ! set am_fix tapering parameters
    1237       16128 :    if (am_fixer.and..not.am_fix_lbl) then
    1238           0 :       am_fix_taper = .true.   ! always apply tapering with global fixer
    1239           0 :       ptapk        = ptap**constants%cappa
    1240           0 :       xdlt2        = 2._r8/(log((ptap+.5_r8*dptap)/(ptap-.5_r8*dptap))*constants%cappa)
    1241             :    end if
    1242             : 
    1243             : #if ( defined OFFLINE_DYN )
    1244             : 
    1245             :    ! advance the meteorology data
    1246             :    call advance_met(grid)
    1247             : 
    1248             :    ! set the staggered winds (vorticity winds) to offline meteorological data
    1249             :    call get_us_vs( grid, u, v )
    1250             : #endif
    1251             : 
    1252       16128 :    if (high_alt) then
    1253           0 :       call cam_thermo_calc_kappav( tracer, cap3v, cpv=cp3v )
    1254             :    else
    1255    85817088 :       cp3v  = cp
    1256    81677568 :       cp3vc = cp
    1257    85817088 :       cap3v = cappa
    1258    87042816 :       cap3vi= cappa
    1259    81677568 :       cap3vc= cappa
    1260             :    endif
    1261             : 
    1262       16128 :    if ( km > 1 ) then         ! not shallow water equations
    1263             : 
    1264       16128 :       if (consv) then
    1265             : 
    1266           0 :          if (grid%iam .lt. grid%npes_xy) then
    1267             : 
    1268             :             ! Tests indicate that t3 does not have consistent
    1269             :             ! pole values, e.g. t3(:,1,k) are not all the same.
    1270             :             ! Not clear why this is not the case: it may be that the pole
    1271             :             ! values are not consistent on the restart file.  For the time being,
    1272             :             ! perform a parallel sum over t3 and correct the pole values
    1273             : 
    1274           0 :             if ( jfirstxy == 1 ) then
    1275           0 :                call par_xsum(grid, t3xy(:,1,:), km, XXX)
    1276           0 :                do k = 1, km
    1277           0 :                   do i = ifirstxy, ilastxy
    1278           0 :                      t3xy(i,1,k) = XXX(k) / real(im,r8)
    1279             :                   end do
    1280             :                end do
    1281             :             end if
    1282             : 
    1283           0 :             if ( jlastxy == jm ) then
    1284           0 :                call par_xsum(grid, t3xy(:,jm,:), km, XXX)
    1285           0 :                do k = 1, km
    1286           0 :                   do i = ifirstxy, ilastxy
    1287           0 :                      t3xy(i,jm,k) = XXX(k) / real(im,r8)
    1288             :                   end do
    1289             :                end do
    1290             :             end if
    1291             : 
    1292           0 :             if (consv) then
    1293             :                ! Compute globally integrated Total Energy (te0)
    1294           0 :                call t_startf ('benergy')
    1295             : 
    1296             :                call benergy(grid, uxy, vxy, t3xy, delpxy,          &
    1297             :                             tracer(:,:,:,1), pexy, pelnxy, phisxy, &
    1298           0 :                             zvir, cp,  rair, tte, te0)
    1299             : 
    1300           0 :                call t_stopf('benergy')
    1301             :             end if
    1302             : 
    1303             :          end if
    1304             :       end if
    1305             :    end if
    1306             : 
    1307             : 
    1308             :    ! Allocate temporary work arrays
    1309             :    ! Change later to use pointers for SMP performance???
    1310             :    ! (prime candidates: uc, vc, delpf)
    1311             : 
    1312       16128 :    call t_startf ('dyn_run_alloc')
    1313             : 
    1314       16128 :    if (ct_aux) then
    1315             :       ! Temporarily set subdomain limits in auxiliary process equal to those of antecedent
    1316             :       ! to allow following arrays to have proper size
    1317             :       ! (Normally, sizes of unneeded arrays for auxiliary processes will be deliberately small.)
    1318           0 :       jkstore(1) = jfirst
    1319           0 :       jkstore(2) = jlast
    1320           0 :       jkstore(3) = kfirst
    1321           0 :       jkstore(4) = klast
    1322           0 :       jfirst = jfirstct
    1323           0 :       jlast  = jlastct
    1324           0 :       kfirst = kfirstct
    1325           0 :       klast  = klastct
    1326             :    endif
    1327             : 
    1328       80640 :    allocate( worka(im,jfirst:     jlast,     kfirst:klast) )
    1329       64512 :    allocate( workb(im,jfirst:     jlast,     kfirst:klast) )
    1330       80640 :    allocate(   dp0(im,jfirst-1:   jlast,     kfirst:klast) )
    1331       64512 :    allocate(   mfx(im,jfirst:     jlast,     kfirst:klast) )
    1332       80640 :    allocate(   mfy(im,jfirst:     jlast+1,   kfirst:klast) )
    1333       80640 :    allocate(    cx(im,jfirst-ng_d:jlast+ng_d,kfirst:klast) )
    1334       64512 :    allocate(    cy(im,jfirst:     jlast+1,   kfirst:klast) )
    1335   108866688 :    dp0(:,:,:) = 0._r8
    1336    81677568 :    mfx(:,:,:) = 0._r8
    1337   108866688 :    mfy(:,:,:) = 0._r8
    1338   244812288 :    cx(:,:,:) = 0._r8
    1339   108866688 :    cy(:,:,:) = 0._r8
    1340             : 
    1341       16128 :    if (ct_aux) then
    1342             :       ! Restore subdomain limits in auxiliary process
    1343           0 :       jfirst = jkstore(1)
    1344           0 :       jlast  = jkstore(2)
    1345           0 :       kfirst = jkstore(3)
    1346           0 :       klast  = jkstore(4)
    1347             :    endif
    1348             : 
    1349       80640 :    allocate( delpf(im,jfirst-ng_d:jlast+ng_d,kfirst:klast) )
    1350       64512 :    allocate(    uc(im,jfirst-ng_d:jlast+ng_d,kfirst:klast) )
    1351       80640 :    allocate(    vc(im,jfirst-2:   jlast+2,   kfirst:klast) )
    1352       80640 :    allocate(   dpt(im,jfirst-1:   jlast+1,   kfirst:klast) )
    1353       80640 :    allocate(   dwz(im,jfirst-1:    jlast,    kfirst:klast+1) )
    1354       80640 :    allocate(   pkc(im,jfirst-1:   jlast+1,   kfirst:klast+1) )
    1355       64512 :    allocate(    wz(im,jfirst-1:   jlast+1,   kfirst:klast+1) )
    1356       80640 :    allocate(  pkcc(im,jfirst  :   jlast  ,   kfirst:klast+1) )
    1357       64512 :    allocate(   wzc(im,jfirst  :   jlast  ,   kfirst:klast+1) )
    1358       64512 :    allocate(pkkp(im,jfirst:jlast,kfirst:klast+1))
    1359       64512 :    allocate(wzkp(im,jfirst:jlast,kfirst:klast+1))
    1360       80640 :    allocate(wzxy(ifirstxy:ilastxy,jfirstxy:jlastxy,km+1))
    1361       80640 :    allocate(tempxy(ifirstxy:ilastxy,jfirstxy:jlastxy,km))
    1362       64512 :    allocate(dp0xy(ifirstxy:ilastxy,jfirstxy:jlastxy,km))
    1363       80640 :    allocate(psxy3(ifirstxy:ilastxy,jfirstxy:jlastxy,npr_z))
    1364       64512 :    allocate(phisxy3(ifirstxy:ilastxy,jfirstxy:jlastxy,npr_z))
    1365             : 
    1366             : #if ( defined OFFLINE_DYN )
    1367             :    allocate( ps_obs(im,jfirst:jlast) )
    1368             :    allocate( ps_mod(im,jfirst:jlast) )
    1369             :    allocate( u_tmp(im,jfirst-ng_d:jlast+ng_s,kfirst:klast) )
    1370             :    allocate( v_tmp(im,jfirst-ng_s:jlast+ng_d,kfirst:klast) )
    1371             : #endif
    1372             : 
    1373             : 
    1374             :    ! Allocation of tracers
    1375             : 
    1376       16128 :    if (ct_aux) then
    1377             :       ! Temporarily set subdomain limits in auxiliary process equal to those of antecedent
    1378             :       ! to allow trac2d temporary storage to have proper size
    1379           0 :       jfirst = jfirstct
    1380           0 :       jlast  = jlastct
    1381           0 :       kfirst = kfirstct
    1382           0 :       klast  = klastct
    1383             :    end if
    1384       96768 :    allocate ( q_internal(im, jfirst:jlast, kfirst:klast, ntotq) )
    1385             : 
    1386             :    ! Trac2d-related mpi quantities for ct_overlap and tracer decomposition
    1387       64512 :    allocate (ctreq(ntotq+ntg0,trac_decomp))
    1388       48384 :    allocate (ctreqs(ntotq+ntg0,trac_decomp))
    1389       48384 :    allocate (cdcreqs(trac_decomp,4))
    1390      145152 :    cdcreqs(:,:) = 0
    1391             : #if defined(SPMD)
    1392       64512 :    allocate (ctstat(MPI_STATUS_SIZE,ntotq+ntg0,trac_decomp))
    1393       48384 :    allocate (ctstats(MPI_STATUS_SIZE,ntotq+ntg0,trac_decomp))
    1394             : #endif
    1395             : 
    1396             :    ! Allocate the variables used in tapering
    1397       16128 :    if (am_fix_taper) then
    1398           0 :       allocate(zpkck(dyn_state%grid%jm,dyn_state%grid%km))
    1399             :    end if
    1400             : 
    1401             :    ! Allocate fields required for dycore diagnostic
    1402       16128 :    if (am_fixer .or. am_diag) then
    1403           0 :       allocate(du_fix_i(ifirstxy:ilastxy,jfirstxy:jlastxy,km))
    1404           0 :       allocate(du_k    (ifirstxy:ilastxy,jfirstxy:jlastxy+1))
    1405           0 :       allocate(du_north(ifirstxy:ilastxy,km))
    1406           0 :       allocate(uc_s(im,jfirst-ng_d:jlast+ng_s,kfirst:klast) )
    1407           0 :       allocate(vc_s(im,jfirst-ng_s:jlast+ng_d,kfirst:klast) )
    1408           0 :       allocate(uc_i(ifirstxy:ilastxy,jfirstxy:jlastxy,km))
    1409           0 :       allocate(vc_i(ifirstxy:ilastxy,jfirstxy:jlastxy,km))
    1410           0 :       du_fix_i(:,:,:) = 0._r8
    1411           0 :       uc_s (:,:,:)  = 0._r8
    1412           0 :       vc_s (:,:,:)  = 0._r8
    1413             :    end if
    1414             : 
    1415             :    ! Compute i.d.'s of remote processes for ct_overlap or trac_decomp
    1416       16128 :    naux = 0
    1417       16128 :    if ((ct_overlap .gt. 0 .and. kaux .lt. 2) .or.      &
    1418             :       (trac_decomp .gt. 1 .and. kaux .lt. trac_decomp)) then
    1419             : 
    1420             :       ! Identify involved processes
    1421           0 :       iremotea(:) = -1
    1422           0 :       naux = max(1,trac_decomp-1)
    1423             : 
    1424           0 :       if (kaux .eq. 0) then
    1425             : 
    1426             :          ! Primary process - identify corresponding auxiliary process(es)
    1427           0 :          do kt = 1, naux
    1428           0 :             iremotea(kt) = iamlocal + kt*grid%npes_yz
    1429           0 :             cxtaga(kt) = cxtag + (kt-1)*(ntotq+ntg0)
    1430           0 :             cytaga(kt) = cytag + (kt-1)*(ntotq+ntg0)
    1431           0 :             mfxtaga(kt) = mfxtag + (kt-1)*(ntotq+ntg0)
    1432           0 :             mfytaga(kt) = mfytag + (kt-1)*(ntotq+ntg0)
    1433             :          end do
    1434             :       else
    1435             : 
    1436             :          ! Auxiliary process - identify corresponding primary process
    1437           0 :          iremotea(1) = iamlocal - kaux*grid%npes_yz
    1438             :       end if
    1439           0 :       iremote = iremotea(1)
    1440             :       ! Message sizes
    1441           0 :       ndp0  = im*(jlast-jfirst+2       )*(klast-kfirst+1)
    1442           0 :       ncx   = im*(jlast-jfirst+2*ng_d+1)*(klast-kfirst+1)
    1443           0 :       ncy   = im*(jlast-jfirst+2       )*(klast-kfirst+1)
    1444           0 :       nmfx  = im*(jlast-jfirst+1       )*(klast-kfirst+1)
    1445           0 :       nmfy  = im*(jlast-jfirst+2       )*(klast-kfirst+1)
    1446           0 :       ntrac = im*(jlast-jfirst+1       )*(klast-kfirst+1)
    1447             :    end if
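   These counts are simply the element totals of the corresponding arrays, ghost zones
   included; for example, ncx reproduces the declared extent of cx:

      ! cx is allocated as cx(im, jfirst-ng_d:jlast+ng_d, kfirst:klast), hence
      !   ncx = im * ((jlast+ng_d)-(jfirst-ng_d)+1) * (klast-kfirst+1)
      !       = im * (jlast-jfirst+2*ng_d+1)        * (klast-kfirst+1)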
    1448             : 
    1449       16128 :    if (ct_aux) then
    1450             :       ! Restore subdomain limits in auxiliary process
    1451           0 :       jfirst = jkstore(1)
    1452           0 :       jlast  = jkstore(2)
    1453           0 :       kfirst = jkstore(3)
    1454           0 :       klast  = jkstore(4)
    1455             :    end if
    1456             : 
    1457             :    ! Set tracer limits to be supplied to trac2d (needed even without tracer decomposition)
    1458       16128 :    ktloa => grid%ktloa
    1459       16128 :    kthia => grid%kthia
    1460       16128 :    ktlo = grid%ktlo
    1461       16128 :    kthi = grid%kthi
    1462             : 
    1463       16128 :    call t_stopf  ('dyn_run_alloc')
    1464             : 
    1465             :    ! Determine splitting
    1466       16128 :    bdt = ndt
    1467             : 
    1468             :    ! Second/third level splitting (nsplit and n2 variables overloaded)
    1469       16128 :    n2     = (n2+nv   -1) / nv
    1470       16128 :    nsplit = (ns+n2*nv-1) / (n2*nv)
    1471       16128 :    dt     = bdt / real(nsplit*n2*nv,r8)
    1472             : 
    1473       16128 :    if (print_subcycling) then
    1474        1536 :       print_subcycling = .false.
    1475        1536 :       if (masterproc) then
    1476           2 :          write(iulog,*) 'FV subcycling - nv, n2, nsplit, dt = ', nv, n2, nsplit, dt
    1477           2 :          if ( (nsplit*n2*nv /= dyn_state%nsplit) .or. (n2*nv /= dyn_state%nspltrac) ) then
    1478           0 :             write(iulog,*) "ERROR:  Because of loop nesting, FV dycore can't use the specified namelist settings for subcycling"
    1479           0 :             write(iulog,*) '  The original namelist settings were:'
    1480           0 :             write(iulog,*) '  fv_nsplit   = ', dyn_state%nsplit
    1481           0 :             write(iulog,*) '  fv_nspltrac = ', dyn_state%nspltrac
    1482           0 :             if( dyn_state%nspltvrm /= 1 ) write(iulog,*) '  fv_nspltvrm = ', dyn_state%nspltvrm
    1483           0 :             write(iulog,*)
    1484           0 :             write(iulog,*) '  fv_nsplit needs to be a multiple of fv_nspltrac'
    1485           0 :             if( dyn_state%nspltvrm /= 1 ) write(iulog,*) '    which in turn needs to be a multiple of fv_nspltvrm.'
    1486           0 :             write(iulog,*) '  Suggested settings would be:'
    1487           0 :             write(iulog,*) '  fv_nsplit   = ', nsplit*n2*nv
    1488           0 :             write(iulog,*) '  fv_nspltrac = ', n2*nv
    1489           0 :             if( dyn_state%nspltvrm /= 1 ) write(iulog,*) '  fv_nspltvrm = ', nv
    1490           0 :             call endrun("Bad namelist settings for FV subcycling.")
    1491             :          end if
    1492             :       end if
    1493             :    end if
    1494             : 
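   Both divisions in the splitting computation above are integer ceiling divisions, which
   is why the consistency check insists that fv_nsplit be a multiple of fv_nspltrac (and
   that in turn a multiple of fv_nspltvrm). A worked example with assumed namelist values:

      ! ndt = 1800 s, fv_nsplit = 8, fv_nspltrac = 4, fv_nspltvrm = 2:
      !   nv     = 2
      !   n2     = (4 + 2 - 1)/2       = 2        ! ceil(4/2)
      !   nsplit = (8 + 2*2 - 1)/(2*2) = 2        ! ceil(8/4)
      !   dt     = 1800/(2*2*2)        = 225 s
      ! nsplit*n2*nv = 8 = fv_nsplit and n2*nv = 4 = fv_nspltrac, so the
      ! check passes and each small dynamics step is 225 s.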
    1495             :    ! If convt_local is false, pt is updated for the next iteration of the iv=1,nv loop.
    1496             :    ! On the last iteration, convt_local is set to convt
    1497       16128 :    convt_local = .false.
    1498             : 
    1499             :    ! initialise global non-conservation integrals
    1500       16128 :    am1=0._r8
    1501       16128 :    me0=1._r8
    1502             : 
    1503       16128 :    if (am_fixer.or.am_diag) then
    1504           0 :       du_fix_g        = 0._r8
    1505           0 :       du_fix(:)       = 0._r8
    1506           0 :       du_fix_s(:)     = 0._r8
    1507           0 :       dufix_xy(:,:,:) = 0._r8
    1508             :    end if
    1509             : 
    1510           0 :    if (am_diag) then
    1511           0 :       ucxy = 0._r8
    1512           0 :       vcxy = 0._r8
    1513             : 
    1514             : !$omp parallel do private(i,j,k)
    1515             :       ! store old winds to get total increments
    1516           0 :       do k = 1, km
    1517           0 :          do j = jfirstxy, jlastxy
    1518           0 :             do i = ifirstxy, ilastxy
    1519           0 :                duxy(i,j,k)=uxy(i,j,k)
    1520           0 :                dvxy(i,j,k)=vxy(i,j,k)
    1521             :             enddo
    1522             :          enddo
    1523             :       enddo
    1524             :    end if
    1525             : 
    1526             :    ! Begin vertical re-mapping sub-cycle loop
    1527       80640 :    do iv = 1, nv
    1528             : 
    1529       64512 :       if (iv == nv) convt_local = convt
    1530             : 
    1531             :       ! Transpose XY arrays to YZ
    1532       64512 :       call t_barrierf('sync_xy_to_yz_1', grid%commdyn)
    1533       64512 :       call t_startf ('xy_to_yz')
    1534             : 
    1535       64512 :       if (grid%iam .lt. grid%npes_xy) then
    1536             : 
    1537       64512 :          if (grid%twod_decomp .eq. 1) then
    1538             : 
    1539             : #if defined( SPMD )
    1540             : 
    1541             : 
    1542             : !$omp parallel do private(i,j,k)
    1543             :             ! Embed psxy and phisxy in 3D arrays since the transpose machinery cannot handle 2D arrays
    1544      838656 :             do k = 1, npr_z
    1545     3161088 :                do j = jfirstxy, jlastxy
    1546    58834944 :                   do i = ifirstxy, ilastxy
    1547    55738368 :                      psxy3(i,j,k)   = psxy(i,j)
    1548    58060800 :                      phisxy3(i,j,k) = phisxy(i,j)
    1549             :                   end do
    1550             :                end do
    1551             :             end do
    1552             : 
    1553       64512 :             if (grid%modc_onetwo .eq. 1) then
    1554             :                call mp_sendirr(grid%commxy, grid%xy2d_to_yz2d%SendDesc,                   &
    1555             :                                grid%xy2d_to_yz2d%RecvDesc, psxy3, ps,                     &
    1556           0 :                                modc=grid%modc_dynrun )
    1557             :                call mp_recvirr(grid%commxy, grid%xy2d_to_yz2d%SendDesc,                   &
    1558             :                                grid%xy2d_to_yz2d%RecvDesc, psxy3, ps,                     &
    1559           0 :                                modc=grid%modc_dynrun )
    1560             : 
    1561             :                call mp_sendirr(grid%commxy, grid%xy2d_to_yz2d%SendDesc,                   &
    1562             :                                grid%xy2d_to_yz2d%RecvDesc, phisxy3, phis,                 &
    1563           0 :                                modc=grid%modc_dynrun )
    1564             :                call mp_recvirr(grid%commxy, grid%xy2d_to_yz2d%SendDesc,                   &
    1565             :                                grid%xy2d_to_yz2d%RecvDesc, phisxy3, phis,                 &
    1566           0 :                                modc=grid%modc_dynrun )
    1567             :             else
    1568             :                call mp_sendirr(grid%commxy, grid%xy2d_to_yz2d%SendDesc,                   &
    1569             :                                grid%xy2d_to_yz2d%RecvDesc, psxy3, ps,                     &
    1570             :                                phisxy3, phis,                                             &
    1571       64512 :                                modc=grid%modc_dynrun )
    1572             :                call mp_recvirr(grid%commxy, grid%xy2d_to_yz2d%SendDesc,                   &
    1573             :                                grid%xy2d_to_yz2d%RecvDesc, psxy3, ps,                     &
    1574             :                                phisxy3, phis,                                             &
    1575       64512 :                                modc=grid%modc_dynrun )
    1576             :             end if
    1577             : 
    1578             :             ! if OFFLINE_DYN is defined, u and v are filled at this point
    1579             : 
    1580             : #if defined( OFFLINE_DYN )
    1581             :             call mp_sendirr( grid%commxy, grid%uxy_to_u%SendDesc,                        &
    1582             :                              grid%uxy_to_u%RecvDesc, uxy, u_tmp,                         &
    1583             :                              modc=grid%modc_dynrun )
    1584             :             call mp_recvirr( grid%commxy, grid%uxy_to_u%SendDesc,                        &
    1585             :                              grid%uxy_to_u%RecvDesc, uxy, u_tmp,                         &
    1586             :                              modc=grid%modc_dynrun )
    1587             : 
    1588             :             call mp_sendirr( grid%commxy, grid%vxy_to_v%SendDesc,                        &
    1589             :                              grid%vxy_to_v%RecvDesc, vxy, v_tmp,                         &
    1590             :                              modc=grid%modc_dynrun )
    1591             :             call mp_recvirr( grid%commxy, grid%vxy_to_v%SendDesc,                        &
    1592             :                              grid%vxy_to_v%RecvDesc, vxy, v_tmp,                         &
    1593             :                              modc=grid%modc_dynrun )
    1594             : 
    1595             : !$omp parallel do private(i,j,k)
    1596             :             do k = kfirst, klast
    1597             :                do j = jfirst, jlast
    1598             :                   do i = 1, im
    1599             :                      u(i,j,k) = (1._r8-met_rlx(k))*u_tmp(i,j,k) + met_rlx(k)*u(i,j,k)
    1600             :                      v(i,j,k) = (1._r8-met_rlx(k))*v_tmp(i,j,k) + met_rlx(k)*v(i,j,k)
    1601             :                   end do
    1602             :                end do
    1603             :             end do
    1604             : #else
    1605             :             call mp_sendirr( grid%commxy, grid%uxy_to_u%SendDesc,                        &
    1606             :                              grid%uxy_to_u%RecvDesc, uxy, u,                             &
    1607       64512 :                              modc=grid%modc_dynrun )
    1608             :             call mp_recvirr( grid%commxy, grid%uxy_to_u%SendDesc,                        &
    1609             :                              grid%uxy_to_u%RecvDesc, uxy, u,                             &
    1610       64512 :                              modc=grid%modc_dynrun )
    1611             : 
    1612             :             call mp_sendirr( grid%commxy, grid%vxy_to_v%SendDesc,                        &
    1613             :                              grid%vxy_to_v%RecvDesc, vxy, v,                             &
    1614       64512 :                              modc=grid%modc_dynrun )
    1615             :             call mp_recvirr( grid%commxy, grid%vxy_to_v%SendDesc,                        &
    1616             :                              grid%vxy_to_v%RecvDesc, vxy, v,                             &
    1617       64512 :                              modc=grid%modc_dynrun )
    1618             : #endif
    1619             : 
    1620             :             call mp_sendirr( grid%commxy, grid%pexy_to_pe%SendDesc,                      &
    1621             :                              grid%pexy_to_pe%RecvDesc, pexy, pe,                         &
    1622       64512 :                              modc=grid%modc_dynrun )
    1623             :             call mp_recvirr( grid%commxy, grid%pexy_to_pe%SendDesc,                      &
    1624             :                              grid%pexy_to_pe%RecvDesc, pexy, pe,                         &
    1625       64512 :                              modc=grid%modc_dynrun )
    1626             : 
    1627             :             call mp_sendirr( grid%commxy, grid%ijk_xy_to_yz%SendDesc,                    &
    1628             :                              grid%ijk_xy_to_yz%RecvDesc, delpxy, delp,                   &
    1629       64512 :                              modc=grid%modc_dynrun )
    1630             :             call mp_recvirr( grid%commxy, grid%ijk_xy_to_yz%SendDesc,                    &
    1631             :                              grid%ijk_xy_to_yz%RecvDesc, delpxy, delp,                   &
    1632       64512 :                              modc=grid%modc_dynrun )
    1633             : 
    1634             :             call mp_sendirr( grid%commxy, grid%pkxy_to_pkc%SendDesc,                     &
    1635             :                              grid%pkxy_to_pkc%RecvDesc, pkxy, pk,                        &
    1636       64512 :                              modc=grid%modc_dynrun )
    1637             :             call mp_recvirr( grid%commxy, grid%pkxy_to_pkc%SendDesc,                     &
    1638             :                              grid%pkxy_to_pkc%RecvDesc, pkxy, pk,                        &
    1639       64512 :                              modc=grid%modc_dynrun )
    1640             : 
    1641             :             call mp_sendirr( grid%commxy, grid%ptxy_to_pt%SendDesc,                      &
    1642             :                              grid%ptxy_to_pt%RecvDesc, ptxy, pt,                         &
    1643       64512 :                              modc=grid%modc_dynrun )
    1644             :             call mp_recvirr( grid%commxy, grid%ptxy_to_pt%SendDesc,                      &
    1645             :                              grid%ptxy_to_pt%RecvDesc, ptxy, pt,                         &
    1646       64512 :                              modc=grid%modc_dynrun )
    1647       64512 :             if (high_alt) then
    1648             :                call mp_sendirr( grid%commxy, grid%ijk_xy_to_yz%SendDesc,                    &
    1649             :                                 grid%ijk_xy_to_yz%RecvDesc, cp3v, cp3vc,                    &
    1650           0 :                                 modc=grid%modc_dynrun )
    1651             :                call mp_recvirr( grid%commxy, grid%ijk_xy_to_yz%SendDesc,                    &
    1652             :                                 grid%ijk_xy_to_yz%RecvDesc, cp3v, cp3vc,                    &
    1653           0 :                                 modc=grid%modc_dynrun )
    1654             :                call mp_sendirr( grid%commxy, grid%ijk_xy_to_yz%SendDesc,                    &
    1655             :                                 grid%ijk_xy_to_yz%RecvDesc, cap3v, cap3vc,                  &
    1656           0 :                                 modc=grid%modc_dynrun )
    1657             :                call mp_recvirr( grid%commxy, grid%ijk_xy_to_yz%SendDesc,                    &
    1658             :                                 grid%ijk_xy_to_yz%RecvDesc, cap3v, cap3vc,                  &
    1659           0 :                                 modc=grid%modc_dynrun )
    1660             :             endif
    1661             : 
    1662       64512 :             if (modc_tracers .eq. 0) then
    1663           0 :                do mq = 1, ntotq
    1664           0 :                   q3xypt => tracer(:,:,:,mq)
    1665           0 :                   q3yzpt => q_internal(:,:,:,mq)
    1666             :                   call mp_sendirr( grid%commxy, grid%ijk_xy_to_yz%SendDesc,                  &
    1667             :                                    grid%ijk_xy_to_yz%RecvDesc, q3xypt, q3yzpt,               &
    1668           0 :                                    modc=grid%modc_dynrun )
    1669             :                   call mp_recvirr( grid%commxy, grid%ijk_xy_to_yz%SendDesc,                  &
    1670             :                                    grid%ijk_xy_to_yz%RecvDesc, q3xypt, q3yzpt,               &
    1671           0 :                                    modc=grid%modc_dynrun )
    1672             :                end do
    1673             :             else
    1674     4773888 :                do mq = 1, ntotq, modc_tracers
    1675     4709376 :                   mlast = min(mq+modc_tracers-1,ntotq)
    1676             :                   call mp_sendtrirr( grid%commxy, grid%ijk_xy_to_yz%SendDesc,                      &
    1677             :                                      grid%ijk_xy_to_yz%RecvDesc, tracer, q_internal, mq, mlast, ntotq,    &
    1678             :                                      grid%ifirstxy, grid%ilastxy, grid%jfirstxy, grid%jlastxy,     &
    1679             :                                      1, grid%km,                                                   &
    1680             :                                      1, grid%im, grid%jfirst, grid%jlast, grid%kfirst, grid%klast, &
    1681     4709376 :                                      modc=grid%modc_tracer )
    1682             :                   call mp_recvtrirr( grid%commxy, grid%ijk_xy_to_yz%SendDesc,                      &
    1683             :                                      grid%ijk_xy_to_yz%RecvDesc, tracer, q_internal, mq, mlast, ntotq,    &
    1684             :                                      grid%ifirstxy, grid%ilastxy, grid%jfirstxy, grid%jlastxy,     &
    1685             :                                      1, grid%km,                                                   &
    1686             :                                      1, grid%im, grid%jfirst, grid%jlast, grid%kfirst, grid%klast, &
    1687     4773888 :                                      modc=grid%modc_tracer )
    1688             :                end do
    1689             :             end if
    1690             : 
    1691             : #else
    1692             :             write(iulog,*)'DYN_COMP:dyn_run -- SPMD must be defined for 2D decomp -- returning'
    1693             :             rc = DYN_RUN_MUST_BE_2D_DECOMP
    1694             :             return    ! Not possible to have 2D decomposition with SPMD undefined
    1695             : #endif
    1696             :          else ! if not twod_decomp
    1697             : 
    1698           0 :             do j = jfirst, jlast
    1699           0 :                do i = 1, im
    1700           0 :                   ps(i,j)   = psxy(i,j)
    1701           0 :                   phis(i,j) = phisxy(i,j)
    1702             :                end do
    1703             :             end do
    1704             : 
    1705             : !$omp parallel do private(i,j,k)
    1706           0 :             do j = jfirst, jlast
    1707           0 :                do k = 1, km+1
    1708           0 :                   do i = 1, im
    1709           0 :                      pe(i,k,j) = pexy(i,k,j)
    1710             :                   end do
    1711             :                end do
    1712             :             end do
    1713             : 
    1714             : !$omp parallel do private(i,j,k)
    1715           0 :             do k = 1, km+1
    1716           0 :                do j = jfirst, jlast
    1717           0 :                   do i = 1, im
    1718           0 :                      pk(i,j,k) = pkxy(i,j,k)
    1719             :                   end do
    1720             :                end do
    1721             :             end do
    1722             : 
    1723             : !$omp parallel do private(i,j,k)
    1724           0 :             do k = 1, km
    1725           0 :                do j = jfirst, jlast
    1726           0 :                   do i = 1, im
    1727             : #if defined( OFFLINE_DYN )
    1728             :                      u(i,j,k)    = (1._r8-met_rlx(k))*uxy(i,j,k) + met_rlx(k)*u(i,j,k)
    1729             :                      v(i,j,k)    = (1._r8-met_rlx(k))*vxy(i,j,k) + met_rlx(k)*v(i,j,k)
    1730             : #else
    1731           0 :                      u(i,j,k)    = uxy(i,j,k)
    1732           0 :                      v(i,j,k)    = vxy(i,j,k)
    1733             : #endif
    1734           0 :                      delp(i,j,k) = delpxy(i,j,k)
    1735           0 :                      pt(i,j,k)   = ptxy(i,j,k)
    1736             :                   end do
    1737             :                end do
    1738             :             end do
    1739           0 :             if (high_alt) then
    1740             : !$omp parallel do private(i,j,k)
    1741           0 :                do k = 1, km
    1742           0 :                   do j = jfirst, jlast
    1743           0 :                      do i = 1, im
    1744           0 :                         cp3vc(i,j,k) = cp3v(i,j,k)
    1745           0 :                         cap3vc(i,j,k) = cap3v(i,j,k)
    1746             :                      end do
    1747             :                   end do
    1748             :                end do
    1749             :             endif
    1750             : 
    1751           0 :             do mq = 1, ntotq
    1752             : 
    1753             :                ! For now just copy in the contents of tracer; later, use pointers
    1754             :                ! TODO:  q_internal(mq) => tracer(mq)    ! Make sure not to allocate q_internal in this case
    1755             : 
    1756           0 :                q_internal(1:im,jfirst:jlast,kfirst:klast,mq) = &
    1757           0 :                    tracer(1:im,jfirst:jlast,kfirst:klast,mq)
    1758             :             end do
    1759             : 
    1760             :          end if   ! (grid%twod_decomp .eq. 1)
    1761             : 
    1762             :       end if      ! (grid%iam .lt. grid%npes_xy)
    1763             : 
    1764             : #if defined(SPMD)
    1765             : 
    1766             :       ! Send tracers to auxiliary processes when overlapping
    1767       64512 :       if (ct_overlap .gt. 0 .and. n2 .gt. 1 .and. kaux .eq. 0) then
    1768           0 :          do iq = 1, ntotq
    1769           0 :             call mpiisend(q_internal(:,:,:,iq), ntrac, mpir8, iremote, tractag+iq-1, commnyz, ctreqs(5+iq,1))
    1770             :          end do
    1771             :       end if
    1772             : 
    1773             :       ! Send tracers to auxiliary processes when decomposing over tracers
    1774       64512 :       if (trac_decomp .gt. 1 .and. kaux .eq. 0) then
    1775           0 :          do kt = 2, trac_decomp
    1776           0 :             do iq = ktloa(kt), kthia(kt)
    1777           0 :                tagu = tractag+iq-1 + (kt-2)*(ntotq+ntg0)
    1778           0 :                call mpiisend(q_internal(:,:,:,iq), ntrac, mpir8, iremotea(kt-1), tagu, commnyz, ctreqs(5+iq,kt-1))
    1779             :             end do
    1780             :          end do
    1781             :       end if
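                     : 
                     :       ! Tag bookkeeping for the sends above: tracer iq of decomposition
                     :       ! group kt is sent with tag tractag+iq-1 + (kt-2)*(ntotq+ntg0), so
                     :       ! each group's messages occupy a distinct tag range and cannot be
                     :       ! matched by a receive posted for another group.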
    1782             : #endif
    1783             : 
    1784       64512 :       call t_stopf  ('xy_to_yz')
    1785             : 
    1786   338946048 :       omgaxy(:,:,:) = 0._r8
    1787             : 
    1788       64512 :       if (am_fixer .or. am_diag) then
    1789           0 :          du_fix_s (:)  = 0._r8
    1790           0 :          uc_s (:,:,:)  = 0._r8
    1791           0 :          vc_s (:,:,:)  = 0._r8
    1792             :       endif
    1793             : 
    1794             :       ! Begin tracer sub-cycle loop
    1795      129024 :       do n = 1, n2
    1796             : 
    1797       64512 :          if (ntotq > 0) then
    1798             : 
    1799       64512 :             call t_barrierf('sync_small_ts_init', grid%commdyn)
    1800       64512 :             call t_startf('small_ts_init')
    1801             : 
    1802             : !$omp parallel do private(i, j, k)
    1803      440832 :             do k = kfirst, klast
    1804     1569792 :                do j = jfirst, jlast
    1805   326645760 :                   do i = 1, im
    1806             :                      ! Save initial delp field before the small-time-step
    1807             :                      ! Initialize the CFL number accumulators: (cx, cy)
    1808             :                      ! Initialize total mass fluxes: (mfx, mfy)
    1809   325140480 :                      dp0(i,j,k) = delp(i,j,k)
    1810   325140480 :                      cx(i,j,k) = 0._r8
    1811   325140480 :                      cy(i,j,k) = 0._r8
    1812   325140480 :                      mfx(i,j,k) = 0._r8
    1813   326269440 :                      mfy(i,j,k) = 0._r8
    1814             :                   end do
    1815             :                end do
    1816             :             end do
    1817             : 
    1818             : #if defined( SPMD )
    1819       64512 :             if (grid%iam .lt. grid%npes_yz) then
    1820             :                call mp_send4d_ns( grid%commyz, im, jm, km,                     &
    1821       64512 :                                   1, jfirst, jlast, kfirst, klast, 1, 0, dp0 )
    1822             :                call mp_recv4d_ns( grid%commyz, im, jm, km,                     &
    1823       64512 :                                   1, jfirst, jlast, kfirst, klast, 1, 0, dp0 )
    1824             :             end if
    1825             : #endif
    1826             : 
    1827       64512 :             call t_stopf  ('small_ts_init')
    1828             : 
    1829             :          end if  ! (ntotq > 0)
    1830             : 
    1831             : #if defined(SPMD)
    1832             : 
    1833             :          ! Send dp0 to auxiliary processes when overlapping or when decomposing over tracers
    1834       64512 :          if (kaux .eq. 0) then
    1835       64512 :             if (ct_overlap .gt. 0 .and. n .lt. n2) then
    1836           0 :                call mpiisend(dp0, ndp0, mpir8, iremote, dp0tag, commnyz, ctreqs(1,1))
    1837             :             end if
    1838             : 
    1839       64512 :             if (trac_decomp .gt. 1) then
    1840           0 :                do kt = 2, trac_decomp
    1841           0 :                   tagu = dp0tag + (kt-2)*(ntotq+ntg0)
    1842           0 :                   call mpiisend(dp0, ndp0, mpir8, iremotea(kt-1), tagu, commnyz, ctreqs(1,kt-1))
    1843             :                end do
    1844             :             end if
    1845             :          end if
    1846             : #endif
    1847             : 
    1848             :          ! Begin dynamics sub-cycle loop
    1849      322560 :          do it = 1, nsplit
    1850             : 
    1851      258048 :             if (it == nsplit .and. n == n2) then
    1852       64512 :                ipe = 1                     ! end of cd_core; output pexy for te_map
    1853      193536 :             else if (it == 1 .and. n == 1) then
    1854       64512 :                ipe = -1                    ! start of cd_core
    1855             :             else
    1856      129024 :                ipe = 0
    1857             :             end if
    1858             : 
    1859             :             ! Determine whether this is the second-to-last call to cd_core
    1860      258048 :             cd_penul = .false.
    1861      258048 :             if ( nsplit > 1 ) then
    1862      258048 :                if ( (n == n2) .and. (it == nsplit-1) ) cd_penul = .true.
    1863           0 :             else if ( n2 > 1 ) then
    1864           0 :                if ( n == n2-1 ) cd_penul = .true.
    1865             :             end if
    1866             : 
    1867             :             if (cd_penul) then
    1868       64512 :                if (ipe == -1) then
    1869           0 :                   ipe = -2   ! second to last is also the first
    1870             :                else
    1871       64512 :                   ipe = 2
    1872             :                end if
    1873             :             end if
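                     : 
                     :             ! Taken together, ipe encodes where this cd_core call falls in
                     :             ! the subcycle sequence:
                     :             !   ipe = -1  first call
                     :             !   ipe = -2  first call that is also second to last
                     :             !   ipe =  2  second-to-last call
                     :             !   ipe =  1  last call (output pexy for te_map)
                     :             !   ipe =  0  all other calls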
    1874             : 
    1875             :             ! s_trac is true if cd_core is to post sends for ct_overlap or trac_decomp;
    1876             :             ! such sends are posted during the last inner cd_core subcycle
    1877             :             s_trac = ((ct_overlap .gt. 0 .and. it .eq. nsplit .and. n .lt. n2) .or.     &
    1878      258048 :                       (trac_decomp .gt. 1 .and. it .eq. nsplit))
    1879             : 
    1880             : 
    1881      258048 :             if ((it == nsplit) .and. (n == n2) .and. (iv == nv)) then
    1882             : !$omp parallel do private(j)
    1883       64512 :                do j = jfirstxy, jlastxy
    1884    85946112 :                   pexy_om(ifirstxy:ilastxy,1:km+1,j) = pexy(ifirstxy:ilastxy,1:km+1,j)
    1885             :                end do
    1886             :             end if
    1887             : 
    1888             :             ! Call the Lagrangian dynamical core using a small time step
    1889             : 
    1890      258048 :             call t_barrierf('sync_cd_core', grid%commdyn)
    1891      258048 :             call t_startf ('cd_core')
    1892             : 
    1893      258048 :             if (grid%iam .lt. grid%npes_yz) then
    1894      258048 :                am_fix_out = am_fixer .or. am_diag
    1895             :                call cd_core(grid,   nx,     u,   v,   pt,                    &
    1896             :                             delp,   pe,     pk,  nsplit,  dt,                &
    1897             :                             ptop,   umax,   pi, ae,                          &
    1898             :                             cp3vc,  cap3vc, cp3v, cap3v,                     &
    1899             :                             icd,    jcd, iord, jord,   ipe,                  &
    1900             :                             div24del2flag, del2coef,                         &
    1901             :                             om,     phis,     cx  ,  cy, mfx, mfy,           &
    1902             :                             delpf, uc, vc, pkz, dpt, worka,                  &
    1903             :                             dwz, pkc, wz,  phisxy, ptxy, pkxy,               &
    1904             :                             pexy, pkcc, wzc, wzxy, delpxy,                   &
    1905             :                             pkkp, wzkp, cx_om, cy_om, filtcw, s_trac,        &
    1906             :                             naux, ncx, ncy, nmfx, nmfy, iremotea,            &
    1907           0 :                             cxtaga, cytaga, mfxtaga, mfytaga, cdcreqs(1,1),  &
    1908      258048 :                             cdcreqs(1,2), cdcreqs(1,3), cdcreqs(1,4),        &
    1909             :                             kmtp, am_correction, am_geom_crrct, am_fix_out,  &
    1910      516096 :                             dod, don, high_order_top)
    1911             : 
    1912      516096 :                ctreqs(2,:) = cdcreqs(:,1)
    1913      516096 :                ctreqs(3,:) = cdcreqs(:,2)
    1914      516096 :                ctreqs(4,:) = cdcreqs(:,3)
    1915      516096 :                ctreqs(5,:) = cdcreqs(:,4)
    1916             :             end if !  (grid%iam .lt. grid%npes_yz)
    1917             : 
    1918      258048 :             call t_stopf  ('cd_core')
    1919             : 
    1920             :             ! AM fixer
    1921      258048 :             if (am_fixer.or.am_diag) then
    1922             : 
    1923           0 :                call t_barrierf('sync_lfix', grid%commdyn)
    1924           0 :                call t_startf ('lfix')
    1925           0 :                if (grid%iam .lt. grid%npes_yz) then
    1926             : 
    1927             :                   ! option for pressure tapering on AM fixer
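                     :                   ! For reference, with x = (ptapk/avgpk(k))**xdlt2 the
                     :                   ! expression below simplifies to
                     :                   !   taper(k) = 0.5*(1 + (1-x)/(1+x)) = 1/(1+x),
                     :                   ! so taper -> 1 where avgpk(k) >> ptapk and taper -> 0 where
                     :                   ! avgpk(k) << ptapk, confining the fixer to higher pressures.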
    1928           0 :                   if (am_fix_taper) then
    1929           0 :                      zpkck(:,:)=0._r8
    1930             : !$omp parallel do private(j, k)
    1931           0 :                      do k=kfirst,klast
    1932           0 :                         do j = jfirst, jlast
    1933           0 :                            zpkck(j,k)=0.25_r8*sum(pkc(:,j,k))*grid%cose(j)
    1934             :                         enddo
    1935             :                      enddo
    1936           0 :                      do k=kfirst,klast
    1937           0 :                         call par_vecsum(jm, jfirst, jlast, zpkck(1:jm,k), me0, grid%comm_y, grid%npr_y)
    1938           0 :                         avgpk(k)=me0/im/sum(grid%cose)
    1939           0 :                         taper(k)=.5_r8*(1._r8+(1._r8-(ptapk/avgpk(k))**xdlt2)/(1._r8+(ptapk/avgpk(k))**xdlt2))
    1940             :                      enddo
    1941             :                   else
    1942           0 :                      do k=kfirst,klast
    1943           0 :                         taper(k)=1._r8
    1944             :                      enddo
    1945             :                   endif
    1946             : 
    1947             :                   ! always exclude fixer at top levels if top is not high order
    1948           0 :                   if (.not.high_order_top) then
    1949           0 :                      taper(1:kmtp)=0._r8
    1950             :                   endif
    1951             : 
    1952           0 :                   do k = kfirst, klast
    1953           0 :                      call par_vecsum(jm, jfirst, jlast, don(1:jm,k), am1, grid%comm_y, grid%npr_y)
    1954           0 :                      dons(k) = am1
    1955             :                   end do
    1956             : 
    1957           0 :                   do k = kfirst, klast
    1958           0 :                      call par_vecsum(jm, jfirst, jlast, dod(1:jm,k), me0, grid%comm_y, grid%npr_y)
    1959           0 :                      dods(k) = me0
    1960             :                   end do
    1961             : 
    1962           0 :                   if (am_fix_lbl) then
    1963             : !$omp parallel do private(i, j, k)
    1964           0 :                      do k = kfirst, klast
    1965           0 :                         do j = jfirst, jlast
    1966           0 :                            do i = 1, im
    1967           0 :                               u(i,j,k) = u(i,j,k) - dons(k)/dods(k)*grid%cose(j) * taper(k)
    1968             :                            end do
    1969             :                         end do
    1970             :                      end do
    1971             :                   endif
    1972             : 
    1973             :                   ! diagnose du_fix
    1974             :                   if (am_fix_lbl) then ! output applied increment (tapered)
    1975             : !$omp parallel do private(k)
    1976           0 :                      do k = kfirst, klast
    1977           0 :                         du_fix_s(k)=du_fix_s(k)-dons(k)/dods(k)*taper(k)
    1978             :                      end do
    1979           0 :                   elseif(am_diag) then ! output diagnosed increment (not tapered)
    1980             : !$omp parallel do private(k)
    1981           0 :                      do k = kfirst, klast
    1982           0 :                         du_fix_s(k)=du_fix_s(k)-dons(k)/dods(k)
    1983             :                      end do
    1984             :                   endif
    1985             : 
    1986             : !$omp parallel do private(j, k)
    1987           0 :                   do k=kfirst,klast
    1988           0 :                      do j = jfirst, jlast
    1989           0 :                         don(j,k)=don(j,k)*taper(k)
    1990           0 :                         dod(j,k)=dod(j,k)*taper(k)
    1991             :                      enddo
    1992             :                   enddo
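                     :                   ! shr_reprosum_calc returns bit-for-bit reproducible global
                     :                   ! sums of don and dod over the yz communicator, so the fixer
                     :                   ! increment am1/me0 is independent of task count and
                     :                   ! summation order.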
    1993           0 :                   tmpsum(1,1) = SUM(don)
    1994           0 :                   tmpsum(1,2) = SUM(dod)
    1995           0 :                   call shr_reprosum_calc(tmpsum, tmpresult, 1, 1, 2, commid=grid%commyz)
    1996           0 :                   am1 = tmpresult(1)
    1997           0 :                   me0 = max(tmpresult(2),tiny)
    1998             : 
    1999           0 :                   if (am_fixer.and.(.not.am_fix_lbl)) then
    2000             : !$omp parallel do private(i, j, k)
    2001           0 :                      do k = kfirst, klast
    2002           0 :                         do j = jfirst, jlast
    2003           0 :                            do i = 1, im
    2004           0 :                               u(i,j,k) = u(i,j,k) - am1/me0*grid%cose(j) *taper(k)
    2005             :                            end do
    2006             :                         end do
    2007             :                      end do
    2008             : 
    2009             : !$omp parallel do private(k)
    2010           0 :                      do k = kfirst, klast
    2011           0 :                         du_fix_s(k)=du_fix_s(k)-am1/me0*taper(k)
    2012             :                      end do
    2013             :                   end if  ! (am_fix_lbl)
    2014             : 
    2015           0 :                   du_fix_g = du_fix_g - am1/me0
    2016           0 :                   if (masterproc) then
    2017           0 :                      if ((it == nsplit) .and. (n == n2) .and. (iv == nv)) then
    2018           0 :                         write(iulog,'(1x,a21,1x,1x,e25.17)') "AM GLOBAL FIXER: ", du_fix_g
    2019             :                      endif
    2020             :                   endif
    2021             :                   ! the following call is blocking, but probably cheaper than 3D transposition for du_fix
    2022           0 :                   if ((it == nsplit) .and. (n == n2)) then
    2023           0 :                      call par_vecsum(km, kfirst, klast, du_fix_s, tmp, grid%comm_z, grid%npr_z, return_sum_in=.true.)
    2024             :                   endif
    2025             :                end if     ! (grid%iam .lt. grid%npes_yz)
    2026           0 :                call t_stopf  ('lfix')
    2027             : 
    2028             : !$omp parallel do private(i,j,k)
    2029           0 :                do k=kfirst,klast
    2030           0 :                   do j = jfirst, jlast
    2031           0 :                      do i=1,im
    2032           0 :                         uc_s(i,j,k)=uc_s(i,j,k)+uc(i,j,k)
    2033           0 :                         vc_s(i,j,k)=vc_s(i,j,k)+vc(i,j,k)
    2034             :                      enddo
    2035             :                   enddo
    2036             :                enddo
    2037             : 
    2038             :             end if    ! (am_fixer.or.am_diag)
    2039             : 
    2040      322560 :             if ((it == nsplit) .and. (n == n2) .and. (iv == nv)) then
    2041             : !$omp  parallel do     &
    2042             : !$omp  default(shared) &
    2043             : !$omp  private(i,j,k)
    2044       64512 :                do j = jfirstxy, jlastxy
    2045     3435264 :                   do k = 1, km
    2046    84720384 :                      do i = ifirstxy, ilastxy
    2047    81285120 :                         omgaxy(i,k,j) = omgaxy(i,k,j) + 0.5_r8*(pexy(i,k,j) + pexy(i,k+1,j) - &
    2048   165957120 :                                         pexy_om(i,k,j) - pexy_om(i,k+1,j))/dt
    2049             :                      end do
    2050             :                   end do
    2051     3499776 :                   do k = 1, km+1
    2052    85929984 :                      do i = ifirstxy, ilastxy
    2053    85881600 :                         pexy_om(i,k,j) = 0.5_r8*(pexy_om(i,k,j) + pexy(i,k,j))
    2054             :                      end do
    2055             :                   end do
    2056             :                end do
    2057             : 
    2058             :                !-----------------------------------------------------
    2059             :                ! Add the v*grad(p) term to omega (dp/dt) for physics
    2060             :                !-----------------------------------------------------
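                     :                ! The pexy - pexy_om differences accumulated above supply the
                     :                ! local-tendency part of omega = Dp/Dt = (dp/dt)_local + v.grad(p);
                     :                ! compute_vdot_gradp adds the advective v.grad(p) part using the
                     :                ! time-mean edge pressures now held in pexy_om.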
    2061       16128 :                call t_startf ('vdot_gradp')
    2062       16128 :                if (grid%iam .lt. grid%npes_xy) then
    2063       16128 :                   call compute_vdot_gradp( grid, dt, dt/dt, cx_om, cy_om, pexy_om, omgaxy )
    2064             :                end if
    2065       16128 :                call t_stopf  ('vdot_gradp')
    2066             : 
    2067             :             end if
    2068             : 
    2069             :          end do  !  it = 1, nsplit - dynamics sub-cycle loop
    2070             : 
    2071      129024 :          if (ntotq .ne. 0) then
    2072             : 
    2073             : #if ( defined OFFLINE_DYN )
    2074             :             if (met_fix_mass) then
    2075             :                ps_mod(:,:) = ps(:,:)
    2076             :                ! get the observed PS interpolated to current substep
    2077             :                call get_met_fields(grid, ps_obs, n2, n)
    2078             : 
    2079             :                ! adjust mass fluxes and edge pressures to be consistent with observed PS
    2080             :                call adjust_press(grid, ps_mod, ps_obs, mfx, mfy, pexy)
    2081             : 
    2082             :                if (high_alt) then
    2083             : !$omp parallel do private(i,j,k)
    2084             :                   do k=2,km
    2085             :                      do j=jfirstxy,jlastxy
    2086             :                         do i=ifirstxy,ilastxy
    2087             :                            cap3vi(i,j,k) = 0.5_r8*(cap3v(i,j,k-1)+cap3v(i,j,k))
    2088             :                         enddo
    2089             :                      enddo
    2090             :                   enddo
    2091             :                   cap3vi(:,:,1) = 1.5_r8 * cap3v(:,:,1) - 0.5_r8 * cap3v(:,:,2)
    2092             :                   cap3vi(:,:,km+1) = 1.5_r8 * cap3v(:,:,km) - 0.5_r8 * cap3v(:,:,km-1)
    2093             :                endif
    2094             : !$omp parallel do private(i,j,k)
    2095             :                ! make pkxy consistent with the adjusted pexy
    2096             :                do i = ifirstxy, ilastxy
    2097             :                   do j = jfirstxy, jlastxy
    2098             :                      do k = 1, km+1
    2099             :                         pkxy(i,j,k) = pexy(i,k,j)**cap3vi(i,j,k)
    2100             :                      end do
    2101             :                   end do
    2102             :                end do
    2103             : 
    2104             : !$omp parallel do private(i,j,k)
    2105             :                ! adjust courant numbers to be consistent with the adjusted mass fluxes
    2106             :                do i = 1, im
    2107             :                   do j = jfirst, jlast
    2108             :                      do k = kfirst, klast
    2109             :                         if (i .ne. 1) cx(i,j,k) = mfx(i,j,k)/(0.5_r8*(dp0(i-1,j,k)+dp0(i,j,k)))
    2110             :                         if (i .eq. 1) cx(i,j,k) = mfx(i,j,k)/(0.5_r8*(dp0(1,j,k)+dp0(im,j,k)))
    2111             :                      end do
    2112             :                   end do
    2113             :                end do
    2114             : 
    2115             : !$omp parallel do private(i,j,k)
    2116             :                do i = 1, im
    2117             :                   do j = jfirst, jlast
    2118             :                      do k = kfirst, klast
    2119             :                         if ((j .gt. 1) .and. (j .lt. jm)) cy(i,j,k) = &
    2120             :                            mfy(i,j,k)/(0.5_r8*(dp0(i,j-1,k)+dp0(i,j,k)))/grid%cose(j)
    2121             :                      end do
    2122             :                   end do
    2123             :                end do
    2124             :             end if
    2125             : #endif
    2126             : 
    2127             :             ! WS 2006-12-04 : this seems like the safest place to preprocess and
    2128             :             !                 transpose the C-grid mass fluxes and, later, the
    2129             :             !                 Courant numbers for potential output
    2130             : 
    2131             :             ! Horizontal mass fluxes
    2132             : 
    2133       64512 :             if (grid%iam .lt. grid%npes_xy) then
    2134             : 
    2135       64512 :                if (grid%twod_decomp .eq. 1) then
    2136             : #if defined( SPMD )
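                     : 
                     :                   ! Convert the accumulated mass-flux sums to physical fluxes
                     :                   ! in Pa m^2/s: scale by the cell edge lengths ae*grid%dp
                     :                   ! (meridional) and grid%dl*ae*grid%cosp(j) (zonal), divide by
                     :                   ! the large time step ndt; the meridional flux workb carries
                     :                   ! an extra 1/grid%cose(j) metric factor.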
    2137             : !$omp parallel do private(i,j,k)
    2138      440832 :                   do k = kfirst, klast
    2139     1569792 :                      do j = jfirst, jlast
    2140   326645760 :                         do i = 1, im
    2141   325140480 :                            worka(i,j,k) = mfx(i,j,k)*(ae*grid%dp)*(grid%dl*ae*grid%cosp(j))/(ndt) ! Pa m^2/s
    2142   326269440 :                            workb(i,j,k) = mfy(i,j,k)*(grid%dl*ae*grid%cosp(j))*(ae*grid%dp)/(ndt*grid%cose(j)) ! Pa m^2 / s
    2143             :                         end do
    2144             :                      end do
    2145             :                   end do
    2146       64512 :                   if (grid%modc_onetwo .eq. 1) then
    2147             :                      call mp_sendirr( grid%commxy, grid%ijk_yz_to_xy%SendDesc,                  &
    2148             :                                       grid%ijk_yz_to_xy%RecvDesc, worka, mfxxy,                 &
    2149           0 :                                       modc=grid%modc_dynrun )
    2150             :                      call mp_recvirr( grid%commxy, grid%ijk_yz_to_xy%SendDesc,                  &
    2151             :                                       grid%ijk_yz_to_xy%RecvDesc, worka, mfxxy,                 &
    2152           0 :                                       modc=grid%modc_dynrun )
    2153             : 
    2154             :                      call mp_sendirr( grid%commxy, grid%ijk_yz_to_xy%SendDesc,                  &
    2155             :                                       grid%ijk_yz_to_xy%RecvDesc, workb, mfyxy,                 &
    2156           0 :                                       modc=grid%modc_dynrun )
    2157             :                      call mp_recvirr( grid%commxy, grid%ijk_yz_to_xy%SendDesc,                  &
    2158             :                                       grid%ijk_yz_to_xy%RecvDesc, workb, mfyxy,                 &
    2159           0 :                                       modc=grid%modc_dynrun )
    2160             :                   else
    2161             :                      call mp_sendirr( grid%commxy, grid%ijk_yz_to_xy%SendDesc,                  &
    2162             :                                       grid%ijk_yz_to_xy%RecvDesc, worka, mfxxy,                 &
    2163             :                                       workb, mfyxy,                                             &
    2164       64512 :                                       modc=grid%modc_dynrun )
    2165             :                      call mp_recvirr( grid%commxy, grid%ijk_yz_to_xy%SendDesc,                  &
    2166             :                                       grid%ijk_yz_to_xy%RecvDesc, worka, mfxxy,                 &
    2167             :                                       workb, mfyxy,                                             &
    2168       64512 :                                       modc=grid%modc_dynrun )
    2169             :                   end if
    2170             : 
    2171             : #else
    2172             :                   write(iulog,*)'DYN_COMP:dyn_run -- SPMD must be defined for 2D decomp -- returning'
    2173             :                   rc = DYN_RUN_MUST_BE_2D_DECOMP
    2174             :                   return    ! Not possible to have 2D decomposition with SPMD undefined
    2175             : #endif
    2176             :                else   ! if not twod_decomp   (1D or sequential)
    2177             : !$omp parallel do private(i,j,k)
    2178           0 :                   do k = kfirst, klast
    2179           0 :                      do j = jfirst, jlast
    2180           0 :                         do i = 1, im
    2181           0 :                            mfxxy(i,j,k) = mfx(i,j,k)*(grid%dl*ae*grid%cosp(j))*(ae*grid%dp)/(ndt*grid%cose(j)) ! Pa m^2 / s
    2182           0 :                            mfyxy(i,j,k) = mfy(i,j,k)*(grid%dl*ae*grid%cosp(j))*(ae*grid%dp)/(ndt*grid%cose(j)) ! Pa m^2 / s
    2183             :                         end do
    2184             :                      end do
    2185             :                   end do
    2186             : 
    2187             :                end if
    2188             : 
    2189             :             end if  !  (grid%iam .lt. grid%npes_xy)
    2190             : 
    2191             : 
    2192             :             ! Perform large-time-step scalar transport using the accumulated CFL and
    2193             :             ! mass fluxes
    2194             : 
    2195       64512 :             call t_barrierf('sync_trac2d', grid%commdyn)
    2196       64512 :             call t_startf ('trac2d')
    2197             : 
    2198             :             ! Overlap trac2d with subsequent cd_core set, or decompose over tracers
    2199             : 
    2200       64512 :             if ((ct_overlap .gt. 0 .and. n .lt. n2 .and. kaux .lt. 2) .or.   &
    2201             :                (trac_decomp .gt. 1 .and. kaux .lt. trac_decomp)) then
    2202             : 
    2203           0 :                if (kaux .eq. 0) then
    2204             : 
    2205             :                   ! Primary process
    2206             : 
    2207             :                   ! Send data to auxiliary yz decomposition
    2208             :                   ! Communicate tracers on first subcycle only
    2209             :                   ! Also post receive of new tracer values from aux processes
    2210             : 
    2211             : #if defined(SPMD)
    2212           0 :                   if (n .eq. 1) then
    2213             : 
    2214             :                      ! Block on send of tracers to aux
    2215           0 :                      if (ct_overlap .gt. 0) then
    2216           0 :                         do iq = 1, ntotq
    2217           0 :                            call mpiwait(ctreqs(5+iq,1), ctstats(1,5+iq,1))
    2218             :                         end do
    2219             :                      end if
    2220             : 
    2221           0 :                      if (trac_decomp .gt. 1) then
    2222           0 :                         do kt = 2, trac_decomp
    2223           0 :                            do iq = ktloa(kt), kthia(kt)
    2224           0 :                               call mpiwait(ctreqs(5+iq,kt-1), ctstats(1,5+iq,kt-1))
    2225             :                            enddo
    2226             :                         enddo
    2227             :                      endif
    2228             : 
    2229             :                      ! Post receive for updated tracers from aux
    2230           0 :                      if (ct_overlap .gt. 0) then
    2231           0 :                         do iq = 1, ntotq
    2232             :                            call mpiirecv(q_internal(:,:,:,iq), ntrac, mpir8, iremote,    &
    2233           0 :                                tractag+iq-1, commnyz, ctreq(iq,1))
    2234             :                         end do
    2235             :                      end if
    2236             : 
    2237           0 :                      if (trac_decomp .gt. 1) then
    2238           0 :                         do kt = 2, trac_decomp
    2239           0 :                            do iq = ktloa(kt), kthia(kt)
    2240           0 :                               tagu = tractag+iq-1 + (kt-2)*(ntotq+ntg0)
    2241           0 :                               call mpiirecv(q_internal(:,:,:,iq), ntrac, mpir8, iremotea(kt-1),   &
    2242           0 :                                             tagu, commnyz, ctreq(iq,kt-1))
    2243             :                            end do
    2244             :                         end do
    2245             :                      end if
    2246             :                   end if  !  (n .eq. 1)
    2247             : 
    2248           0 :                   if (ct_overlap .gt. 0) then
    2249             : 
    2250             :                      ! Block on send of dp0 to aux
    2251           0 :                      call mpiwait(ctreqs(1,1), ctstats(1,1,1))
    2252             :                      ! Block on sends from cd_core to aux
    2253           0 :                      call mpiwait(ctreqs(2,1), ctstats(1,2,1))
    2254           0 :                      call mpiwait(ctreqs(3,1), ctstats(1,3,1))
    2255           0 :                      call mpiwait(ctreqs(4,1), ctstats(1,4,1))
    2256           0 :                      call mpiwait(ctreqs(5,1), ctstats(1,5,1))
    2257             :                   endif
    2258             : 
    2259           0 :                   if (trac_decomp .gt. 1) then
    2260             : 
    2261           0 :                      do kt = 2, trac_decomp
    2262             :                         ! Block on send of dp0 to aux
    2263           0 :                         call mpiwait(ctreqs(1,kt-1), ctstats(1,1,kt-1))
    2264             :                         ! Block on sends from cd_core to aux
    2265           0 :                         call mpiwait(ctreqs(2,kt-1), ctstats(1,2,kt-1))
    2266           0 :                         call mpiwait(ctreqs(3,kt-1), ctstats(1,3,kt-1))
    2267           0 :                         call mpiwait(ctreqs(4,kt-1), ctstats(1,4,kt-1))
    2268           0 :                         call mpiwait(ctreqs(5,kt-1), ctstats(1,5,kt-1))
    2269             :                      end do
    2270             :                   end if
    2271             : #endif
    2272             : 
    2273             :                else
    2274             : 
    2275             :                   ! Auxiliary process
    2276             : 
    2277             :                   ! Temporarily set subdomain limits and process index in auxiliary process equal
    2278             :                   ! to those of antecedent
    2279           0 :                   jfirst = jfirstct
    2280           0 :                   jlast  = jlastct
    2281           0 :                   kfirst = kfirstct
    2282           0 :                   klast  = klastct
    2283           0 :                   grid%jfirst = jfirstct
    2284           0 :                   grid%jlast  = jlastct
    2285           0 :                   grid%kfirst = kfirstct
    2286           0 :                   grid%klast  = klastct
    2287             :                   ! Translate process index to frame of auxiliary yz decomposition for use with auxiliary
    2288             :                   !    communication in trac2d
    2289           0 :                   grid%iam = iremote
    2290             : 
    2291             : #if defined(SPMD)
    2292             :                   ! Receive data from primary yz decomposition
    2293             :                   ! Include tracers on the first subcycle only
    2294             : 
    2295           0 :                   if (n .eq. 1) then
    2296           0 :                      do iq = ktlo, kthi
    2297           0 :                         tagu = tractag+iq-1 + (kaux-1)*(ntotq+ntg0)
    2298           0 :                         call mpiirecv(q_internal(:,:,:,iq), ntrac, mpir8, iremote, tagu, commnyz, ctreq(5+iq,1))
    2299           0 :                         call mpiwait(ctreq(5+iq,1), ctstat(1,5+iq,1))
    2300             :                      end do
    2301             :                   end if
    2302           0 :                   tagu = dp0tag + (kaux-1)*(ntotq+ntg0)
    2303           0 :                   call mpiirecv(dp0, ndp0, mpir8, iremote, tagu, commnyz, ctreq(1,1))
    2304           0 :                   tagu = cxtag + (kaux-1)*(ntotq+ntg0)
    2305           0 :                   call mpiirecv(cx, ncx, mpir8, iremote, tagu, commnyz, ctreq(2,1))
    2306           0 :                   tagu = cytag + (kaux-1)*(ntotq+ntg0)
    2307           0 :                   call mpiirecv(cy, ncy, mpir8, iremote, tagu, commnyz, ctreq(3,1))
    2308           0 :                   tagu = mfxtag + (kaux-1)*(ntotq+ntg0)
    2309           0 :                   call mpiirecv(mfx, nmfx, mpir8, iremote, tagu, commnyz, ctreq(4,1))
    2310           0 :                   tagu = mfytag + (kaux-1)*(ntotq+ntg0)
    2311           0 :                   call mpiirecv(mfy, nmfy, mpir8, iremote, tagu, commnyz, ctreq(5,1))
    2312           0 :                   call mpiwait(ctreq(1,1), ctstat(1,1,1))
    2313           0 :                   call mpiwait(ctreq(2,1), ctstat(1,2,1))
    2314           0 :                   call mpiwait(ctreq(3,1), ctstat(1,3,1))
    2315           0 :                   call mpiwait(ctreq(4,1), ctstat(1,4,1))
    2316           0 :                   call mpiwait(ctreq(5,1), ctstat(1,5,1))
    2317             : #endif
    2318             : 
    2319             :                end if  !  (kaux .eq. 0)
    2320             : 
    2321             :             else
    2322             : 
    2323             : #if defined(SPMD)
    2324             :                ! Block on receive of updated tracers from aux (last subcycle)
    2325       64512 :                if (ct_overlap .gt. 0 .and. n .eq. n2 .and. n2 .gt. 1 .and. kaux .eq. 0) then
    2326           0 :                   do iq = 1, ntotq
    2327           0 :                      call mpiwait(ctreq(iq,1), ctstat(1,iq,1))
    2328             :                   end do
    2329             :                end if
    2330             : #endif
    2331             : 
    2332             :             end if  !  (ct_overlap .gt. 0 .and. n .lt. n2 .and. kaux .lt. 2)
    2333             :                     ! or (trac_decomp .gt. 1 .and. kaux .lt. trac_decomp)
    2334             : 
    2335             :             ! Call tracer advection
    2336             : 
    2337             :             c_dotrac = ct_overlap .gt. 0 .and.    &
    2338       64512 :                       ((n .lt. n2 .and. kaux .eq. 1) .or. (n .eq. n2 .and. kaux .eq. 0))
    2339       64512 :             t_dotrac = ct_overlap .eq. 0 .and. kaux .lt. trac_decomp
    2340       64512 :             high_alt1: if (high_alt) then
    2341             :                !
    2342             :                ! phl: overwrite last tracer with kappa
    2343             :                !
    2344             :                !$omp parallel do private(i,j,k)
    2345           0 :                do k=grid%kfirst,grid%klast
    2346           0 :                   do j=grid%jfirst,grid%jlast
    2347           0 :                      do i=1,grid%im
    2348           0 :                         q_internal(i,j,k,ntotq) = cap3vc(i,j,k)
    2349             :                      end do
    2350             :                   end do
    2351             :                end do
    2352             :             endif high_alt1
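                     : 
                     :             ! Carrying kappa as the last tracer lets trac2d advect it with
                     :             ! the same fluxes as the other constituents; the advected value
                     :             ! q_internal(:,:,:,ntotq) is compared with the recomputed cap3vc
                     :             ! in the high_alt2 block below to correct pt.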
    2353       64512 :             if (c_dotrac .or. t_dotrac) then
    2354           0 :                call trac2d( grid, dp0(:,jfirst:jlast,:),    q_internal,         &
    2355             :                            cx,    cy,     mfx,    mfy,    iord,   jord,        &
    2356   653356032 :                            fill,  ktlo,   kthi,   workb,  worka  )
    2357             :             endif
    2358             : 
    2359             : #if defined(SPMD)
    2360             :             ! Return data to primary yz decomposition
    2361             :             ! For overlap, next-to-last subcycle only; for tracer decomp, last subcycle only
    2362       64512 :             if (ct_aux .and. ((ct_overlap .gt. 0 .and. n .eq. n2-1) .or.   &
    2363             :                (trac_decomp .gt. 1 .and. n .eq. n2)))  then
    2364           0 :                do iq = ktlo, kthi
    2365           0 :                   tagu = tractag+iq-1 + (kaux-1)*(ntotq+ntg0)
    2366           0 :                   call mpiisend(q_internal(:,:,:,iq), ntrac, mpir8, iremote, tagu, commnyz, ctreqs(5+iq,1))
    2367           0 :                   call mpiwait(ctreqs(5+iq,1), ctstats(1,5+iq,1))
    2368             :                end do
    2369             :             end if
    2370             : #endif
    2371             : 
    2372             : #if defined(SPMD)
    2373             :             ! For tracer decomposition, block on receive of updated tracers from aux (last subcycle)
    2374       64512 :             if (trac_decomp .gt. 1 .and. n .eq. n2 .and. kaux .eq. 0) then
    2375           0 :                do kt = 2, trac_decomp
    2376           0 :                   do iq = ktloa(kt), kthia(kt)
    2377           0 :                      call mpiwait(ctreq(iq,kt-1), ctstat(1,iq,kt-1))
    2378             :                   end do
    2379             :                end do
    2380             :             end if
    2381             : #endif
    2382             : 
    2383             :             ! Restore subdomain limits and process index in auxiliary process
    2384       64512 :             if (ct_aux) then
    2385           0 :                jfirst = jkstore(1)
    2386           0 :                jlast  = jkstore(2)
    2387           0 :                kfirst = jkstore(3)
    2388           0 :                klast  = jkstore(4)
    2389           0 :                grid%jfirst = jkstore(1)
    2390           0 :                grid%jlast  = jkstore(2)
    2391           0 :                grid%kfirst = jkstore(3)
    2392           0 :                grid%klast  = jkstore(4)
    2393           0 :                grid%iam = iamlocal
    2394             :             end if
    2395             : 
    2396             :             ! NOTE: for cd_core / trac2d overlap, tracer data is returned to primary processes
    2397             :             !   prior to n=n2 call to trac2d
    2398             : 
    2399       64512 :             call t_stopf('trac2d')
    2400             : 
    2401       64512 :             trans_pexy: if (met_fix_mass .or. high_alt) then
    2402             : 
    2403           0 :                if (grid%twod_decomp .eq. 1) then
    2404           0 :                  if (grid%iam .lt. grid%npes_yz) then
    2405             : #if defined( SPMD )
    2406             :                   call mp_sendirr( grid%commxy, grid%pexy_to_pe%SendDesc,                    &
    2407             :                                   grid%pexy_to_pe%RecvDesc, pexy, pe,                       &
    2408           0 :                                   modc=grid%modc_dynrun )
    2409             :                   call mp_recvirr( grid%commxy, grid%pexy_to_pe%SendDesc,                    &
    2410             :                                   grid%pexy_to_pe%RecvDesc, pexy, pe,                       &
    2411           0 :                                   modc=grid%modc_dynrun )
    2412             : #endif
    2413             :                  endif
    2414             :                else
    2415             : !$omp parallel do private(i,j,k)
    2416           0 :                   do j = jfirst, jlast
    2417           0 :                      do k = kfirst, klast+1
    2418           0 :                         do i = 1, im
    2419           0 :                            pe(i,k,j) = pexy(i,k,j)
    2420             :                         end do
    2421             :                      end do
    2422             :                   end do
    2423             :                end if
    2424             : 
    2425             : #if ( defined OFFLINE_DYN )
    2426             :                do j = jfirst,jlast
    2427             :                   if (klast .eq. km) ps_mod(:,j) = pe(:,km+1,j)
    2428             :                end do
    2429             : #endif
    2430             :             end if trans_pexy
    2431             : 
    2432           0 :             high_alt2: if (high_alt) then
    2433             : 
    2434             :                !+hi-waccm: perform potential temperature correction:
    2435             :                !           1. Update kappa according to the new major species.
    2436             :                !           2. Calculate the difference between the kappa from step 1 and the kappa from advection.
    2437             :                !           3. Calculate ln(p0/p) from the most recent p.
    2438             :                !           4. Update pt, then transpose to ptxy.
    2439             : 
    2440             :                ! Since rairv is not defined on the yz decomposition, it can be retrieved from cp3vc and cap3vc when needed.
    2441             :                ! Whether mbarv is needed anywhere in the dynamics remains to be checked.
    2442             :                ! These updates of cp3vc, cap3vc, etc. are currently not passed back to the physics.
    2443             :                ! This update is placed here, after the transpose of pexy to pe, because pe is needed on the yz decomposition.
    2444             : 
    2445           0 :                call cam_thermo_calc_kappav( q_internal, cap3vc )
    2446             : 
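                     :                ! The pt update below is the linearization of
                     :                !   pt_new = pt * p**(-(kappa_new - kappa_adv))
                     :                !         ~= pt * (1 - (kappa_new - kappa_adv)*ln(p)),
                     :                ! with ln(p) taken as the layer mean 0.5*(ln(pe(k)) + ln(pe(k+1))),
                     :                ! kappa_new = cap3vc from cam_thermo_calc_kappav, and kappa_adv
                     :                ! the advected kappa in q_internal(:,:,:,ntotq).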
    2447             : !$omp parallel do private(i,j,k)
    2448           0 :                do k = kfirst,klast
    2449           0 :                   do j = jfirst,jlast
    2450           0 :                      do i = 1,im
    2451           0 :                         pt(i,j,k) = pt(i,j,k) * (1._r8 - &
    2452           0 :                              .5_r8*(log(pe(i,k,j))+log(pe(i,k+1,j))) * &
    2453           0 :                              (cap3vc(i,j,k)-q_internal(i,j,k,ntotq)))
    2454             :                      enddo
    2455             :                   enddo
    2456             :                enddo
    2457             : 
    2458             :             endif high_alt2
    2459             :          end if  !          if (ntotq .ne. 0) then
    2460             : 
    2461             :       end do !   do n=1, n2 - tracer sub-cycle loop
    2462             : 
    2463       64512 :       call t_barrierf('sync_yz_to_xy_1', grid%commdyn)
    2464             : 
    2465       64512 :       if (grid%iam .lt. grid%npes_xy) then
    2466             : 
    2467       64512 :          if (grid%twod_decomp .eq. 1) then
    2468             : 
    2469             :             ! Transpose ps, u, v, and tracer from yz to xy decomposition
    2470             :             !
    2471             :             ! Note: pt, pe and pk will already have been transposed through
    2472             :             ! the call to geopk in cd_core. geopk does not actually require the
    2473             :             ! secondary xy decomposition; the direct 16-byte technique works just
    2474             :             ! as well, perhaps better. However, the transpose method is used on the
    2475             :             ! last call to avoid having to compute these three transposes now.
    2476             : 
    2477             : #if defined (SPMD)
    2478             : 
    2479       64512 :             call t_startf ('yz_to_xy_psuv')
    2480             : 
    2481             :             ! Transpose ps
    2482             :             ! Embed in 3D array since transpose machinery cannot handle 2D arrays
    2483             : 
    2484             :             call mp_sendirr( grid%commxy, grid%yz2d_to_xy2d%SendDesc,                   &
    2485             :                              grid%yz2d_to_xy2d%RecvDesc, ps, psxy3,                     &
    2486       64512 :                              modc=grid%modc_dynrun )
    2487             :             call mp_recvirr( grid%commxy, grid%yz2d_to_xy2d%SendDesc,                   &
    2488             :                              grid%yz2d_to_xy2d%RecvDesc, ps, psxy3,                     &
    2489       64512 :                              modc=grid%modc_dynrun )
    2490             : 
    2491             : !$omp parallel do private(i,j)
    2492      258048 :             do j = jfirstxy, jlastxy
    2493     4902912 :                do i = ifirstxy, ilastxy
    2494     4838400 :                   psxy(i,j) = psxy3(i,j,1)
    2495             :                end do
    2496             :             end do
    2497             : 
    2498             :             ! Transpose u
    2499             :             call mp_sendirr( grid%commxy, grid%u_to_uxy%SendDesc,                       &
    2500             :                              grid%u_to_uxy%RecvDesc, u, uxy,                            &
    2501       64512 :                              modc=grid%modc_dynrun )
    2502             :             call mp_recvirr( grid%commxy, grid%u_to_uxy%SendDesc,                       &
    2503             :                              grid%u_to_uxy%RecvDesc, u, uxy,                            &
    2504       64512 :                              modc=grid%modc_dynrun )
    2505             : 
    2506             :             ! Transpose v
    2507             :             call mp_sendirr( grid%commxy, grid%v_to_vxy%SendDesc,                       &
    2508             :                              grid%v_to_vxy%RecvDesc, v, vxy,                            &
    2509       64512 :                              modc=grid%modc_dynrun )
    2510             :             call mp_recvirr( grid%commxy, grid%v_to_vxy%SendDesc,                       &
    2511             :                              grid%v_to_vxy%RecvDesc, v, vxy,                            &
    2512       64512 :                              modc=grid%modc_dynrun )
    2513             : 
    2514             : 
    2515       64512 :             if (am_fixer.or.am_diag) then
    2516             : 
    2517             :                ! Transpose uc_s
    2518             :                call mp_sendirr( grid%commxy, grid%u_to_uxy%SendDesc,                       &
    2519             :                                 grid%u_to_uxy%RecvDesc, uc_s, uc_i,                        &
    2520           0 :                                 modc=grid%modc_dynrun )
    2521             :                call mp_recvirr( grid%commxy, grid%u_to_uxy%SendDesc,                       &
    2522             :                                 grid%u_to_uxy%RecvDesc, uc_s, uc_i,                        &
    2523           0 :                                 modc=grid%modc_dynrun )
    2524             : 
    2525             :                ! Transpose vc_s
    2526             :                call mp_sendirr( grid%commxy, grid%v_to_vxy%SendDesc,                       &
    2527             :                                 grid%v_to_vxy%RecvDesc, vc_s, vc_i,                        &
    2528           0 :                                 modc=grid%modc_dynrun )
    2529             :                call mp_recvirr( grid%commxy, grid%v_to_vxy%SendDesc,                       &
    2530             :                                 grid%v_to_vxy%RecvDesc, vc_s, vc_i,                        &
    2531           0 :                                 modc=grid%modc_dynrun )
    2532             :             end if
    2533             : 
    2534       64512 :             call t_stopf  ('yz_to_xy_psuv')
    2535             : 
    2536       64512 :             call t_startf ('yz_to_xy_q')
    2537             : 
    2538       64512 :             if (modc_tracers .eq. 0) then
    2539           0 :                do mq = 1, ntotq
    2540             : 
    2541             : 
    2542             :                   ! Transpose
    2543           0 :                   q3yzpt => q_internal(:,:,:,mq)
    2544           0 :                   q3xypt => dyn_out%tracer(:,:,:,mq)
    2545             :                   call mp_sendirr( grid%commxy, grid%ijk_yz_to_xy%SendDesc,                    &
    2546             :                                    grid%ijk_yz_to_xy%RecvDesc, q3yzpt, q3xypt,                 &
    2547           0 :                                    modc=grid%modc_dynrun )
    2548             :                   call mp_recvirr( grid%commxy, grid%ijk_yz_to_xy%SendDesc,                    &
    2549             :                                    grid%ijk_yz_to_xy%RecvDesc, q3yzpt, q3xypt,                 &
    2550           0 :                                    modc=grid%modc_dynrun )
    2551             :                end do
    2552             :             else
    2553     4773888 :                do mq = 1, ntotq, modc_tracers
    2554     4709376 :                   mlast = min(mq+modc_tracers-1,ntotq)
    2555             :                   call mp_sendtrirr( grid%commxy, grid%ijk_yz_to_xy%SendDesc,                     &
    2556             :                                      grid%ijk_yz_to_xy%RecvDesc, q_internal, dyn_out%tracer,      &
    2557             :                                      mq, mlast, ntotq, 1, grid%im, grid%jfirst, grid%jlast, grid%kfirst, &
    2558             :                                      grid%klast, grid%ifirstxy, grid%ilastxy, grid%jfirstxy,      &
    2559             :                                      grid%jlastxy, 1, grid%km,                                    &
    2560     4709376 :                                      modc=grid%modc_tracer )
    2561             :                   call mp_recvtrirr( grid%commxy, grid%ijk_yz_to_xy%SendDesc,                     &
    2562             :                                      grid%ijk_yz_to_xy%RecvDesc, q_internal, dyn_out%tracer,      &
    2563             :                                      mq, mlast, ntotq, 1, grid%im, grid%jfirst, grid%jlast, grid%kfirst, &
    2564             :                                      grid%klast, grid%ifirstxy, grid%ilastxy, grid%jfirstxy,      &
    2565             :                                      grid%jlastxy, 1, grid%km,                                    &
    2566     4773888 :                                      modc=grid%modc_tracer )
    2567             :                end do
    2568             :             end if
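                      : 
                      :             ! [Editor's note] The modc_tracers branch batches the tracer
                      :             ! transposes: with, e.g., ntotq = 10 and modc_tracers = 4 the
                      :             ! loop above issues the batches (mq,mlast) = (1,4), (5,8),
                      :             ! (9,10), so up to modc_tracers tracers share one send/receive
                      :             ! pair instead of each tracer paying its own
                      :             ! mp_sendirr/mp_recvirr round trip.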
    2569             : 
    2570       64512 :             call t_stopf  ('yz_to_xy_q')
    2571             : 
    2572       64512 :             if (high_alt) then
    2573             :                ! Transpose pt (because pt correction is done after cd_core)
    2574             : 
    2575           0 :                call t_startf ('yz_to_xy_pt')
    2576             :                call mp_sendirr( grid%commxy, grid%pt_to_ptxy%SendDesc,                    &
    2577             :                     grid%pt_to_ptxy%RecvDesc, pt, ptxy,                 &
    2578           0 :                     modc=grid%modc_dynrun )
    2579             :                call mp_recvirr( grid%commxy, grid%pt_to_ptxy%SendDesc,                    &
    2580             :                     grid%pt_to_ptxy%RecvDesc, pt, ptxy,                 &
    2581           0 :                     modc=grid%modc_dynrun )
    2582           0 :                call t_stopf ('yz_to_xy_pt')
    2583             :             endif
    2584             : 
    2585             : #endif
    2586             : 
    2587             :          else
    2588             : 
    2589           0 :             call t_startf ('yz_to_xy_psuv')
    2590             : 
    2591           0 :             do j = jfirst, jlast
    2592           0 :                do i = 1, im
    2593           0 :                   psxy(i,j) = ps(i,j)
    2594             :                end do
    2595             :             end do
    2596             : 
    2597             : !$omp parallel do private(i,j,k)
    2598           0 :             do k = kfirst, klast
    2599           0 :                do j = jfirst, jlast
    2600           0 :                   do i = 1, im
    2601           0 :                      uxy(i,j,k) = u(i,j,k)
    2602           0 :                      vxy(i,j,k) = v(i,j,k)
    2603             :                   end do
    2604             :                end do
    2605             :             end do
    2606             : 
    2607           0 :             if (am_fixer.or.am_diag) then
    2608             : !$omp parallel do private(i,j,k)
    2609           0 :                do k = kfirst, klast
    2610           0 :                   do j = jfirst, jlast
    2611           0 :                      do i = 1, im
    2612           0 :                         uc_i(i,j,k)= uc_s(i,j,k)
    2613           0 :                         vc_i(i,j,k)= vc_s(i,j,k)
    2614             :                      end do
    2615             :                   end do
    2616             :                end do
    2617             :             end if
    2618             : 
    2619           0 :             if (high_alt) then
    2620             : !$omp parallel do private(i,j,k)
    2621           0 :                do k = kfirst, klast
    2622           0 :                   do j = jfirst, jlast
    2623           0 :                      do i = 1, im
    2624           0 :                         ptxy(i,j,k) = pt(i,j,k)
    2625             :                      end do
    2626             :                   end do
    2627             :                end do
    2628             :             end if
    2629             : 
    2630           0 :             call t_stopf  ('yz_to_xy_psuv')
    2631             : 
    2632           0 :             call t_startf ('yz_to_xy_q')
    2633             : 
    2634             : !$omp parallel do private(i,j,k,mq)
    2635           0 :             do mq = 1, ntotq
    2636             : 
     2637             :                ! Temporary -- ultimately the pointers will be set here instead of copying the contents
    2638           0 :                do k = 1, km
    2639           0 :                   do j = jfirst, jlast
    2640           0 :                      do i = 1, im
    2641           0 :                         dyn_out%tracer(i,j,k,mq) = q_internal(i,j,k,mq)
    2642             :                      end do
    2643             :                   end do
    2644             :                end do
    2645             :             end do
    2646             : 
    2647           0 :             call t_stopf  ('yz_to_xy_q')
    2648             : 
    2649             :          end if  !  (grid%twod_decomp .eq. 1)
    2650             : 
    2651             :       end if  !  (grid%iam .lt. grid%npes_xy)
    2652             : 
    2653       80640 :       if ( km > 1 ) then           ! not shallow water equations
    2654             : 
    2655             :          ! Perform vertical remapping from Lagrangian control-volume to
    2656             :          ! the Eulerian coordinate as specified by the routine set_eta.
    2657             :          ! Note that this finite-volume dycore is otherwise independent of the vertical
    2658             :          ! Eulerian coordinate.
    2659             : 
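                      :          ! [Editor's illustrative sketch -- not part of the original source.]
                      :          ! The remapping is conservative: each Eulerian target layer receives
                      :          ! the mass-weighted overlap contributions of the Lagrangian source
                      :          ! layers.  A minimal first-order (piecewise-constant) analogue of the
                      :          ! per-column operation, with hypothetical names, would be:
                      :          !
                      :          !    subroutine remap1d(ks, kt, pe_src, q_src, pe_tgt, q_tgt)
                      :          !       integer,  intent(in)  :: ks, kt         ! source/target layer counts
                      :          !       real(r8), intent(in)  :: pe_src(ks+1)   ! source edge pressures
                      :          !       real(r8), intent(in)  :: q_src(ks)      ! source layer means
                      :          !       real(r8), intent(in)  :: pe_tgt(kt+1)   ! target edge pressures
                      :          !       real(r8), intent(out) :: q_tgt(kt)      ! remapped layer means
                      :          !       integer  :: k, m
                      :          !       real(r8) :: plo, phi, acc
                      :          !       do k = 1, kt
                      :          !          acc = 0._r8
                      :          !          do m = 1, ks
                      :          !             ! overlap thickness of target layer k with source layer m
                      :          !             plo = max(pe_tgt(k),   pe_src(m))
                      :          !             phi = min(pe_tgt(k+1), pe_src(m+1))
                      :          !             acc = acc + q_src(m)*max(phi - plo, 0._r8)
                      :          !          end do
                      :          !          q_tgt(k) = acc / (pe_tgt(k+1) - pe_tgt(k))
                      :          !       end do
                      :          !    end subroutine remap1d
                      :          !
                      :          ! The actual te_map uses higher-order reconstructions selected by
                      :          ! kord and also remaps the winds and total energy.
                      : 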
    2660             : 
    2661             :          ! te_map requires uxy, vxy, psxy, pexy, pkxy, phisxy, q3xy, and ptxy
    2662             : 
    2663       64512 :          call t_barrierf('sync_te_map', grid%commdyn)
    2664       64512 :          call t_startf ('te_map')
    2665             : 
    2666       64512 :          if (grid%iam .lt. grid%npes_xy) then
    2667             : 
    2668             :             call te_map(grid,     consv,   convt_local, psxy, omgaxy,       &
    2669             :                         pexy,     delpxy,  pkzxy,  pkxy,   ndt,             &
    2670             :                         nx,       uxy,     vxy,    ptxy,   dyn_out%tracer,  &
    2671             :                         phisxy,   cp3v,    cap3v,  kord,   pelnxy,          &
    2672             :                         te0,      tempxy,  dp0xy,  mfxxy,  mfyxy,           &
    2673             :                         uc_i,     vc_i,  du_fix_s, du_fix_i,                &
    2674       64512 :                         am_geom_crrct, (am_fixer.or.am_diag) )
    2675             : 
    2676       64512 :             if (am_diag) then
    2677             : !$omp parallel do private(i,j,k)
    2678           0 :                 do j=jfirstxy,jlastxy
    2679           0 :                   do k=1,km
    2680           0 :                      do i=ifirstxy,ilastxy
    2681           0 :                         ucxy(i,j,k)=ucxy(i,j,k)+uc_i(i,j,k)
    2682           0 :                         vcxy(i,j,k)=vcxy(i,j,k)+vc_i(i,j,k)
    2683             :                      enddo
    2684             :                   enddo
    2685             :                enddo
    2686             :             end if
    2687             : 
    2688       64512 :             if (am_fixer .or. am_diag) then
    2689             : !$omp parallel do private(i,j,k)
    2690           0 :                do j=jfirstxy,jlastxy
    2691           0 :                   do k=1,km
    2692           0 :                      do i=ifirstxy,ilastxy
    2693           0 :                         dufix_xy(i,j,k)=dufix_xy(i,j,k)+du_fix_i(i,j,k)*grid%cose(j)
    2694             :                      enddo
    2695             :                   enddo
    2696             :                enddo
    2697             :             endif
    2698             : 
    2699       64512 :             if ( .not. convt_local ) then
    2700             : !$omp parallel do private(i,j,k)
    2701      193536 :                do j=jfirstxy,jlastxy
    2702    10354176 :                   do k=1,km
    2703   254161152 :                      do i=ifirstxy,ilastxy
    2704           0 :                         t3xy(i,j,k) = ptxy(i,j,k)*pkzxy(i,j,k)/ &
    2705   254016000 :                              (D1_0+zvir*dyn_out%tracer(i,j,k,1))
    2706             :                      end do
    2707             :                   end do
    2708             :                end do
    2709             :             end if
    2710             : 
    2711             :          end if
    2712             : 
    2713       64512 :          call t_stopf ('te_map')
    2714             : 
    2715             :       end if  !  ( km > 1 )
    2716             : 
    2717             :       ! te_map computes uxy, vxy, psxy, delpxy, pexy, pkxy, pkzxy,
    2718             :       ! pelnxy, omgaxy, tracer, ptxy, mfxxy and mfyxy
    2719             : 
    2720             :    end do  !    do iv = 1, nv - vertical re-mapping sub-cycle loop
    2721             : 
    2722             :    ! get total wind increments from dynamics timestep
    2723       16128 :    if (am_diag) then
    2724             : !$omp parallel do private(i,j,k)
    2725           0 :       do k = 1, km
    2726           0 :          do j = jfirstxy, jlastxy
    2727           0 :             do i = ifirstxy, ilastxy
    2728           0 :                duxy(i,j,k) = uxy(i,j,k) - duxy(i,j,k)
    2729           0 :                dvxy(i,j,k) = vxy(i,j,k) - dvxy(i,j,k)
    2730             :             enddo
    2731             :          enddo
    2732             :       enddo
    2733             :    end if
    2734             : 
    2735       16128 :    call t_startf ('dyn_run_dealloc')
    2736             : 
    2737       16128 :    deallocate( worka )
    2738       16128 :    deallocate( workb )
    2739       16128 :    deallocate( dp0 )
    2740       16128 :    deallocate( mfx )
    2741       16128 :    deallocate( mfy )
    2742       16128 :    deallocate(  cx )
    2743       16128 :    deallocate(  cy )
    2744       16128 :    deallocate( delpf )
    2745       16128 :    deallocate( uc    )
    2746       16128 :    deallocate( vc    )
    2747       16128 :    deallocate( dpt   )
    2748       16128 :    deallocate( dwz   )
    2749       16128 :    deallocate( pkc   )
    2750       16128 :    deallocate(  wz   )
    2751       16128 :    deallocate( pkcc )
    2752       16128 :    deallocate( wzc )
    2753       16128 :    deallocate( pkkp )
    2754       16128 :    deallocate( wzkp )
    2755       16128 :    deallocate( wzxy )
    2756       16128 :    deallocate( tempxy )
    2757       16128 :    deallocate( dp0xy )
    2758       16128 :    deallocate( psxy3 )
    2759       16128 :    deallocate( phisxy3 )
    2760       16128 :    deallocate( q_internal )
    2761       16128 :    deallocate (ctreq)
    2762       16128 :    deallocate (ctreqs)
    2763       16128 :    deallocate (cdcreqs)
    2764             : #if defined(SPMD)
    2765       16128 :    deallocate (ctstat)
    2766       16128 :    deallocate (ctstats)
    2767             : #endif
    2768             : #if ( defined OFFLINE_DYN )
    2769             :    deallocate( ps_obs )
    2770             :    deallocate( ps_mod )
    2771             :    deallocate( u_tmp )
    2772             :    deallocate( v_tmp )
    2773             : #endif
    2774             : 
    2775       16128 :    if (am_fix_taper) then
    2776           0 :       deallocate(zpkck)
    2777             :    end if
    2778       16128 :    if (am_fixer.or.am_diag) then
    2779           0 :       deallocate(du_fix_i)
    2780           0 :       deallocate(du_k)
    2781           0 :       deallocate(du_north)
    2782           0 :       deallocate(uc_s)
    2783           0 :       deallocate(vc_s)
    2784           0 :       deallocate(uc_i)
    2785           0 :       deallocate(vc_i)
    2786             :    end if
    2787             : 
    2788       16128 :    call t_stopf  ('dyn_run_dealloc')
    2789             : 
    2790       16128 :    rc = DYN_RUN_SUCCESS
    2791             : 
    2792       32256 : end subroutine dyn_run
    2793             : 
    2794             : !=============================================================================================
    2795             : 
    2796           0 : subroutine dyn_final(restart_file, dyn_state, dyn_in, dyn_out)
    2797             : 
    2798       16128 :    use dynamics_vars, only: dynamics_clean
    2799             : 
    2800             :    character(LEN=*)             , intent(IN   ) :: restart_file
    2801             :    type (T_FVDYCORE_STATE), target              :: dyn_state
    2802             :    type (dyn_import_t), intent(inout)           :: dyn_in
    2803             :    type (dyn_export_t), intent(inout)           :: dyn_out
    2804             :    !-----------------------------------------------------------------------
    2805             : 
    2806           0 :    call dynamics_clean    ( dyn_state%grid  )
    2807           0 :    call dyn_free_interface( dyn_in, dyn_out )
    2808             : 
    2809             :    !=============================================================================================
    2810             :    contains
    2811             :    !=============================================================================================
    2812             : 
    2813           0 :    subroutine dyn_free_interface ( dyn_in, dyn_out )
    2814             : 
    2815             :       ! free the dynamics import and export
    2816             : 
    2817             :       ! arguments
    2818             :       type (dyn_import_t), intent(inout) :: dyn_in
    2819             :       type (dyn_export_t), intent(inout) :: dyn_out
    2820             :       !-----------------------------------------------------------------------
    2821             : 
    2822           0 :       if ( associated(dyn_in%phis) ) deallocate( dyn_in%phis )
    2823           0 :       if ( associated(dyn_in%ps) )   deallocate( dyn_in%ps )
    2824           0 :       if ( associated(dyn_in%u3s) )  deallocate( dyn_in%u3s )
    2825           0 :       if ( associated(dyn_in%v3s) )  deallocate( dyn_in%v3s )
    2826           0 :       if ( associated(dyn_in%pe) )   deallocate( dyn_in%pe )
    2827           0 :       if ( associated(dyn_in%pt) )   deallocate( dyn_in%pt )
    2828           0 :       if ( associated(dyn_in%t3) )   deallocate( dyn_in%t3 )
    2829           0 :       if ( associated(dyn_in%pk) )   deallocate( dyn_in%pk )
    2830           0 :       if ( associated(dyn_in%pkz) )  deallocate( dyn_in%pkz )
    2831           0 :       if ( associated(dyn_in%delp) ) deallocate( dyn_in%delp )
    2832           0 :       if ( associated(dyn_in%tracer) ) deallocate( dyn_in%tracer)
    2833             : 
    2834           0 :       if ( associated(dyn_out%ps) )   nullify( dyn_out%ps )
    2835           0 :       if ( associated(dyn_out%u3s) )  nullify( dyn_out%u3s )
    2836           0 :       if ( associated(dyn_out%v3s) )  nullify( dyn_out%v3s )
    2837           0 :       if ( associated(dyn_out%pe) )   nullify( dyn_out%pe )
    2838           0 :       if ( associated(dyn_out%pt) )   nullify( dyn_out%pt )
    2839           0 :       if ( associated(dyn_out%t3) )   nullify( dyn_out%t3 )
    2840           0 :       if ( associated(dyn_out%pk) )   nullify( dyn_out%pk )
    2841           0 :       if ( associated(dyn_out%pkz) )  nullify( dyn_out%pkz )
    2842           0 :       if ( associated(dyn_out%delp) ) nullify( dyn_out%delp )
    2843           0 :       if ( associated(dyn_out%tracer) ) nullify( dyn_out%tracer )
    2844             : 
    2845           0 :       if ( associated(dyn_out%omga) ) deallocate( dyn_out%omga )
    2846           0 :       if ( associated(dyn_out%peln) ) deallocate( dyn_out%peln )
    2847           0 :       if ( associated(dyn_out%mfx) )  deallocate( dyn_out%mfx )
    2848           0 :       if ( associated(dyn_out%mfy) )  deallocate( dyn_out%mfy )
    2849             : 
    2850           0 :    end subroutine dyn_free_interface
    2851             : 
    2852             : end subroutine dyn_final
    2853             : 
    2854             : !=============================================================================================
    2855             : ! Private routines
    2856             : !=============================================================================================
    2857             : 
    2858         768 : subroutine read_inidat(dyn_in)
    2859             : 
    2860             :   ! Read initial dataset
    2861             : 
    2862             :   ! Arguments
    2863             :   type (dyn_import_t), target, intent(inout) :: dyn_in
    2864             : 
    2865             :   ! Local variables
    2866             :   integer                         :: ifirstxy, ilastxy, jfirstxy, jlastxy, km
    2867             :   integer                         :: m, ntotq
    2868             : 
    2869             :   character(len=16)               :: fieldname
    2870             : 
    2871             :   type(file_desc_t), pointer      :: fh_ini    ! PIO filehandle
    2872             : 
    2873             :   type (t_fvdycore_grid), pointer :: grid
    2874             :   ! variables for analytic initial conditions
    2875         768 :   integer,  allocatable           :: glob_ind(:)
    2876         768 :   integer,  allocatable           :: m_cnst(:)
    2877         768 :   real(r8), allocatable           :: clon_st(:)
    2878             :   integer                         :: nglon, nglat
    2879             :   integer                         :: i, j
    2880             :   integer                         :: jf, gf, uf ! First indices for setting u3s
    2881             :   integer                         :: ierr
    2882             :   integer                         :: lonid
    2883             :   integer                         :: latid
     2884             :   integer                         :: mlon            ! longitude dimension length from dataset
     2885             :   integer                         :: mlat            ! latitude dimension length from dataset
    2886             :   real(r8), parameter             :: deg2rad = pi/180._r8
    2887             : 
    2888             :   character(len=*), parameter     :: sub='read_inidat'
    2889             :   !----------------------------------------------------------------------------
    2890             : 
    2891         768 :   fh_ini  => initial_file_get_id()
    2892             : 
    2893         768 :   grid     => get_dyn_state_grid()
    2894         768 :   ifirstxy =  grid%ifirstxy
    2895         768 :   ilastxy  =  grid%ilastxy
    2896         768 :   jfirstxy =  grid%jfirstxy
    2897         768 :   jlastxy  =  grid%jlastxy
    2898         768 :   km       =  grid%km
    2899         768 :   ntotq    =  grid%ntotq
    2900             : 
    2901             :   ! Set the array initial_mr assuming that constituents are initialized with mixing ratios
    2902             :   ! that are consistent with their declared type in the constituents module.  This array
    2903             :   ! may be modified below to provide backwards compatibility for reading old initial files
    2904             :   ! that contain wet mixing ratios for all constituents regardless of how they were registered.
    2905      168192 :   do i = 1, pcnst
    2906      168192 :      initial_mr(i) = cnst_type(i)
    2907             :   end do
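                      : 
                      :   ! [Editor's note] cnst_type holds 'wet' or 'dry' for each constituent,
                      :   ! so initial_mr starts out matching each constituent's declared type;
                      :   ! it is overwritten per tracer below when the file's mixing_ratio
                      :   ! attribute says otherwise.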
    2908             : 
    2909         768 :   if (analytic_ic_active()) then
    2910           0 :      readvar   = .false.
    2911           0 :     if (jfirstxy == 1) then
    2912           0 :       jf = 1
    2913           0 :       uf = 2
    2914           0 :       gf = (ilastxy - ifirstxy + 1) + 1 ! Skip the first block of longitudes
    2915             :     else
    2916           0 :       jf = jfirstxy-1
    2917           0 :       uf = jfirstxy
    2918           0 :       gf = 1
    2919             :     end if
    2920           0 :     allocate(glob_ind((ilastxy - ifirstxy + 1) * (jlastxy - jfirstxy + 1)))
    2921           0 :     call get_horiz_grid_dim_d(nglon, nglat)
    2922           0 :     m = 1
    2923           0 :     do j = jfirstxy, jlastxy
    2924           0 :       do i = ifirstxy, ilastxy
    2925             :         ! Create a global column index
    2926           0 :         glob_ind(m) = i + (j-1)*nglon
    2927           0 :         m = m + 1
    2928             :       end do
    2929             :     end do
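                      : 
                      :     ! [Editor's note] The global column index is i + (j-1)*nglon: with,
                      :     ! e.g., nglon = 288, column (i=2, j=3) gets index 2 + (3-1)*288 = 578,
                      :     ! numbering columns west-to-east within each latitude row and rows in
                      :     ! order of increasing latitude index.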
    2930           0 :     allocate(m_cnst(ntotq))
    2931           0 :     do i = 1, ntotq
    2932           0 :       m_cnst(i) = i
    2933             :     end do
    2934           0 :     allocate(clon_st(ifirstxy:ilastxy))
    2935           0 :     clon_st(ifirstxy:ilastxy) = londeg_st(ifirstxy:ilastxy,1) * deg2rad
    2936             : 
    2937           0 :     call analytic_ic_set_ic(vc_moist_pressure, clat_staggered(jf:jlastxy-1),  &
    2938           0 :          clon(ifirstxy:ilastxy,1), glob_ind(gf:), U=dyn_in%u3s(:,uf:,:))
    2939           0 :     call analytic_ic_set_ic(vc_moist_pressure, clat(jfirstxy:jlastxy),        &
    2940           0 :          clon_st(ifirstxy:ilastxy), glob_ind, V=dyn_in%v3s)
    2941             :     ! Note that analytic_ic_set_ic makes use of cnst_init_default for
    2942             :     ! the tracers except water vapor.
    2943           0 :     call analytic_ic_set_ic(vc_moist_pressure, clat(jfirstxy:jlastxy),        &
    2944           0 :          clon(ifirstxy:ilastxy,1), glob_ind, T=dyn_in%t3, PS=dyn_in%ps,       &
    2945           0 :          Q=dyn_in%tracer(:,:,:,1:ntotq), PHIS_IN=dyn_in%phis, m_cnst=m_cnst)
    2946           0 :     do m = 1, ntotq
    2947           0 :        call process_inidat(grid, dyn_in, 'CONSTS', m_cnst=m, fh_ini=fh_ini)
    2948             :     end do
    2949           0 :     deallocate(glob_ind)
    2950           0 :     deallocate(m_cnst)
    2951           0 :     deallocate(clon_st)
    2952             : 
    2953             :   else
    2954             : 
    2955             :     !-----------
    2956             :     ! Check coord sizes
    2957             :     !-----------
    2958         768 :      ierr = pio_inq_dimid(fh_ini, 'lon' , lonid)
    2959         768 :      ierr = pio_inq_dimid(fh_ini, 'lat' , latid)
    2960         768 :      ierr = pio_inq_dimlen(fh_ini, lonid , mlon)
    2961         768 :      ierr = pio_inq_dimlen(fh_ini, latid , mlat)
    2962         768 :      if (mlon /= plon .or. mlat /= plat) then
    2963           0 :         write(iulog,*) sub//': ERROR: model parameters do not match initial dataset parameters'
    2964           0 :         write(iulog,*)'Model Parameters:    plon = ',plon,' plat = ',plat
    2965           0 :         write(iulog,*)'Dataset Parameters:  dlon = ',mlon,' dlat = ',mlat
    2966           0 :         call endrun(sub//': ERROR: model parameters do not match initial dataset parameters')
    2967             :      end if
    2968             : 
    2969             :     !-----------
    2970             :     ! 2-D fields
    2971             :     !-----------
    2972             : 
    2973         768 :     fieldname = 'PS'
    2974             :     call infld(fieldname, fh_ini, 'lon', 'lat', ifirstxy, ilastxy, jfirstxy, jlastxy, &
    2975         768 :          dyn_in%ps, readvar, gridname='fv_centers')
    2976         768 :     if (.not. readvar) call endrun(sub//': ERROR: PS not found')
    2977             : 
    2978             : 
    2979             :     !-----------
    2980             :     ! 3-D fields
    2981             :     !-----------
    2982             : 
    2983         768 :     fieldname = 'US'
    2984             :     call infld(fieldname, fh_ini, 'lon', 'slat', 'lev',  ifirstxy, ilastxy, jfirstxy, jlastxy, &
    2985         768 :          1, km, dyn_in%u3s, readvar, gridname='fv_u_stagger')
    2986         768 :     if (.not. readvar) call endrun(sub//': ERROR: US not found')
    2987             : 
    2988         768 :     fieldname = 'VS'
    2989             :     call infld(fieldname, fh_ini, 'slon', 'lat', 'lev',  ifirstxy, ilastxy, jfirstxy, jlastxy, &
    2990         768 :          1, km, dyn_in%v3s, readvar, gridname='fv_v_stagger')
    2991         768 :     if (.not. readvar) call endrun(sub//': ERROR: VS not found')
    2992             : 
    2993         768 :     fieldname = 'T'
    2994             :     call infld(fieldname, fh_ini, 'lon', 'lat', 'lev', ifirstxy, ilastxy, jfirstxy, jlastxy, &
    2995         768 :          1, km, dyn_in%t3, readvar, gridname='fv_centers')
    2996         768 :     if (.not. readvar) call endrun(sub//': ERROR: T not found')
    2997             :   end if
    2998             : 
    2999             :   ! Constituents (read and process one at a time)
    3000             :   !
    3001             :   ! If analytic ICs are being used, we allow constituents in an initial
    3002             :   ! file to overwrite mixing ratios set by the default constituent initialization
    3003             :   ! except for the water species.
    3004             :   !
     3005             :   ! If using analytic ICs the initial file's horizontal grid dimensions
     3006             :   ! only need to be checked in the case that the file contains constituent
     3007             :   ! mixing ratios.
    3008         768 :   if (analytic_ic_active()) then
    3009           0 :      do m = 1, pcnst
    3010           0 :         if (cnst_read_iv(m) .and. .not. cnst_is_a_water_species(cnst_name(m))) then
    3011           0 :            if (dyn_field_exists(fh_ini, trim(cnst_name(m)), required=.false.)) then
    3012           0 :               ierr = pio_inq_dimid(fh_ini, 'lon' , lonid)
    3013           0 :               ierr = pio_inq_dimid(fh_ini, 'lat' , latid)
    3014           0 :               ierr = pio_inq_dimlen(fh_ini, lonid , mlon)
    3015           0 :               ierr = pio_inq_dimlen(fh_ini, latid , mlat)
    3016           0 :               if (mlon /= plon .or. mlat /= plat) then
    3017           0 :                  write(iulog,*) sub//': ERROR: model parameters do not match initial dataset parameters'
    3018           0 :                  write(iulog,*)'Model Parameters:    plon = ',plon,' plat = ',plat
    3019           0 :                  write(iulog,*)'Dataset Parameters:  dlon = ',mlon,' dlat = ',mlat
    3020           0 :                  call endrun(sub//': ERROR: model parameters do not match initial dataset parameters')
    3021             :               end if
    3022           0 :               exit
    3023             :            end if
    3024             :         end if
    3025             :      end do
    3026             :   end if
    3027             : 
    3028      168192 :   do m = 1, pcnst
    3029             : 
    3030      167424 :     if (analytic_ic_active() .and. cnst_is_a_water_species(cnst_name(m))) cycle
    3031             : 
    3032      167424 :     readvar   = .false.
    3033      167424 :     fieldname = cnst_name(m)
    3034      167424 :     if (cnst_read_iv(m) .and. dyn_field_exists(fh_ini, trim(cnst_name(m)), required=.false.)) then
    3035             :       call infld(fieldname, fh_ini, 'lon', 'lat', 'lev', ifirstxy, ilastxy, jfirstxy, jlastxy, &
    3036      136704 :            1, km, dyn_in%tracer(:,:,:,m), readvar, gridname='fv_centers')
    3037             :     end if
    3038      168192 :     call process_inidat(grid, dyn_in, 'CONSTS', m_cnst=m, fh_ini=fh_ini)
    3039             :   end do
    3040             : 
    3041             :   ! Set u3s(:,1,:) to zero as it is used in interpolation routines
    3042        3072 :   if ((jfirstxy == 1) .and. (size(dyn_in%u3s) > 0)) then
    3043       21012 :     dyn_in%u3s(ifirstxy:ilastxy,jfirstxy,1:km) = 0.0_r8
    3044             :   end if
    3045             : 
     3046             :   ! These post-processing steps are always performed
    3047         768 :   call process_inidat(grid, dyn_in, 'PS')
    3048         768 :   call process_inidat(grid, dyn_in, 'T')
    3049             : 
    3050         768 : end subroutine read_inidat
    3051             : 
    3052             : !=========================================================================================
    3053             : 
    3054        1536 : subroutine set_phis(dyn_in)
    3055             : 
    3056             :    ! Set PHIS according to the following rules.
    3057             :    !
    3058             :    ! 1) If a topo file is specified use it.  This option has highest precedence.
    3059             :    ! 2) If not using topo file, but analytic_ic option is on, use analytic phis.
     3060             :    ! 3) Otherwise set phis = 0.0.
    3061             : 
    3062             :    ! Arguments
    3063             :    type (dyn_import_t), target, intent(inout) :: dyn_in   ! dynamics import
    3064             : 
    3065             :    ! local variables
    3066             :    type(file_desc_t),      pointer :: fh_topo
    3067             :    type (t_fvdycore_grid), pointer :: grid
    3068             : 
    3069             :    integer :: ifirstxy, ilastxy, jfirstxy, jlastxy
    3070             :    integer :: ierr
    3071             :    integer :: lonid
    3072             :    integer :: latid
    3073             :    integer :: mlon            ! longitude dimension length from dataset
    3074             :    integer :: mlat            ! latitude dimension length from dataset
    3075             :    integer :: nglon, nglat
    3076             :    integer :: i, j, m
    3077             : 
    3078        1536 :    integer, allocatable :: glob_ind(:)
    3079             : 
    3080             :    character(len=16)                :: fieldname
    3081             :    character(len=*), parameter      :: sub='set_phis'
    3082             :    !----------------------------------------------------------------------------
    3083             : 
    3084        1536 :    fh_topo => topo_file_get_id()
    3085             : 
    3086        1536 :    grid     => get_dyn_state_grid()
    3087        1536 :    ifirstxy =  grid%ifirstxy
    3088        1536 :    ilastxy  =  grid%ilastxy
    3089        1536 :    jfirstxy =  grid%jfirstxy
    3090        1536 :    jlastxy  =  grid%jlastxy
    3091             : 
     3092        1536 :    if (associated(fh_topo)) then
    3093             :       !-----------
    3094             :       ! Check coord sizes
    3095             :       !-----------
    3096        1536 :       ierr = pio_inq_dimid(fh_topo, 'lon' , lonid)
    3097        1536 :       ierr = pio_inq_dimid(fh_topo, 'lat' , latid)
    3098        1536 :       ierr = pio_inq_dimlen(fh_topo, lonid , mlon)
    3099        1536 :       ierr = pio_inq_dimlen(fh_topo, latid , mlat)
    3100        1536 :       if (mlon /= plon .or. mlat /= plat) then
    3101           0 :          write(iulog,*) sub//': ERROR: model parameters do not match topo dataset parameters'
    3102           0 :          write(iulog,*)'Model Parameters:    plon = ',plon,' plat = ',plat
    3103           0 :          write(iulog,*)'Dataset Parameters:  dlon = ',mlon,' dlat = ',mlat
    3104           0 :          call endrun(sub//': ERROR: model parameters do not match topo dataset parameters')
    3105             :       end if
    3106             :     
    3107        1536 :       fieldname = 'PHIS'
     3108        1536 :       readvar   = .false.
    3109             :       call infld(fieldname, fh_topo, 'lon', 'lat', ifirstxy, ilastxy, jfirstxy, jlastxy, &
    3110        1536 :          dyn_in%phis, readvar, gridname='fv_centers')
    3111        1536 :       if (.not. readvar) call endrun(sub//': ERROR: PHIS not found')
    3112             : 
    3113           0 :    else if (analytic_ic_active()) then
    3114             : 
    3115           0 :       allocate(glob_ind((ilastxy - ifirstxy + 1) * (jlastxy - jfirstxy + 1)))
    3116           0 :       call get_horiz_grid_dim_d(nglon, nglat)
    3117           0 :       m = 1
    3118           0 :       do j = jfirstxy, jlastxy
    3119           0 :          do i = ifirstxy, ilastxy
    3120             :             ! Create a global column index
    3121           0 :             glob_ind(m) = i + (j-1)*nglon
    3122           0 :             m = m + 1
    3123             :          end do
    3124             :       end do
    3125             : 
    3126           0 :       call analytic_ic_set_ic(vc_moist_pressure, clat(jfirstxy:jlastxy),      &
    3127           0 :          clon(ifirstxy:ilastxy,1), glob_ind, PHIS_OUT=dyn_in%phis)
    3128             : 
    3129             :    else
    3130             : 
    3131           0 :       dyn_in%phis(:,:) = 0._r8
    3132             : 
    3133             :    end if
    3134             : 
    3135        1536 :    call process_inidat(grid, dyn_in, 'PHIS')
    3136             : 
    3137        1536 : end subroutine set_phis
    3138             : 
    3139             : !=========================================================================================
    3140             : 
    3141      170496 : subroutine process_inidat(grid, dyn_in, fieldname, m_cnst, fh_ini)
    3142             : 
    3143             :    ! Post-process input fields
    3144             :    use commap,              only: clat, clon
    3145             :    use const_init,          only: cnst_init_default
    3146             :    use inic_analytic,       only: analytic_ic_active
    3147             : 
    3148             :    ! arguments
    3149             :    type(t_fvdycore_grid), target, intent(inout) :: grid        ! dynamics state grid
    3150             :    type(dyn_import_t),    target, intent(inout) :: dyn_in      ! dynamics import
    3151             :    character(len=*),              intent(in)    :: fieldname   ! field to be processed
    3152             :    integer,             optional, intent(in)    :: m_cnst      ! constituent index
    3153             :    type(file_desc_t),   optional, intent(inout) :: fh_ini
    3154             : 
    3155             :    ! Local variables
    3156             :    integer :: i, j, k                     ! grid and constituent indices
    3157             :    integer :: npes_xy
    3158             :    integer :: im, jm, km
    3159             :    integer :: ifirstxy, ilastxy, jfirstxy, jlastxy
    3160             : 
    3161             :    integer :: nglon, nglat, rndm_seed_sz
    3162      170496 :    integer, allocatable :: rndm_seed(:)
    3163             :    real(r8) :: pertval                       ! perturbation value
    3164             : 
    3165      170496 :    real(r8), pointer :: phisxy(:,:), psxy(:,:), t3xy(:,:,:)
    3166      170496 :    real(r8), pointer :: tracer(:,:,:,:)
    3167             : 
    3168      340992 :    real(r8) :: xsum(grid%km)               ! temp array for parallel sums
    3169             : 
    3170             :    integer :: err_handling
    3171             :    integer :: varid                        ! variable id
    3172             :    integer :: ret                          ! return values
     3173             :    character(len=256) :: trunits           ! tracer units
    3174             :    character(len=3)   :: mixing_ratio
    3175             : 
    3176             :    character(len=*), parameter :: sub='process_inidat'
    3177             :    !----------------------------------------------------------------------------
    3178             : 
    3179      170496 :    psxy   => dyn_in%ps
    3180      170496 :    phisxy => dyn_in%phis
    3181      170496 :    t3xy   => dyn_in%t3
    3182             : 
    3183      170496 :    npes_xy  = grid%npes_xy
    3184      170496 :    im       = grid%im
    3185      170496 :    jm       = grid%jm
    3186      170496 :    km       = grid%km
    3187      170496 :    ifirstxy = grid%ifirstxy
    3188      170496 :    ilastxy  = grid%ilastxy
    3189      170496 :    jfirstxy = grid%jfirstxy
    3190      170496 :    jlastxy  = grid%jlastxy
    3191             : 
    3192         768 :    select case (fieldname)
    3193             : 
    3194             :    case ('T')
    3195             : 
    3196         768 :       if (iam >= npes_xy) return
    3197             : 
    3198             :       ! Add random perturbation to temperature if requested
    3199         768 :       if ((pertlim /= 0._r8) .and. (.not. analytic_ic_active())) then
    3200             : 
    3201           0 :          if (masterproc) then
    3202           0 :             write(iulog,*) sub//':  Adding random perturbation bounded by +/-', &
    3203           0 :                            pertlim,' to initial temperature field'
    3204             :          end if
    3205             : 
    3206           0 :          call get_horiz_grid_dim_d(nglon, nglat)
    3207           0 :          call random_seed(size=rndm_seed_sz)
    3208           0 :          allocate(rndm_seed(rndm_seed_sz))
    3209             : 
    3210           0 :          do j = jfirstxy, jlastxy
    3211           0 :             do i = ifirstxy, ilastxy
    3212             :                ! seed random_number generator based on global column index
    3213           0 :                rndm_seed = i + (j-1)*nglon
    3214           0 :                call random_seed(put=rndm_seed)
    3215           0 :                do k = 1, km
    3216           0 :                   call random_number(pertval)
    3217           0 :                   pertval = 2._r8*pertlim*(0.5_r8 - pertval)
    3218           0 :                   t3xy(i,j,k) = t3xy(i,j,k)*(1._r8 + pertval)
    3219             :                end do
    3220             :             end do
    3221             :          end do
    3222             : 
    3223           0 :          deallocate(rndm_seed)
    3224             :       end if
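                      : 
                      :       ! [Editor's note] Since the uniform draw is in [0,1), the rescaled
                      :       ! pertval = 2*pertlim*(0.5 - pertval) lies in (-pertlim, +pertlim],
                      :       ! so each temperature is scaled by a factor within 1 +/- pertlim
                      :       ! (at most one part in 10**6 for pertlim = 1.e-6, for example).
                      :       ! Seeding the generator with the global column index makes the
                      :       ! perturbation pattern independent of the processor layout.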
    3225             : 
    3226             :       ! Average T at the poles.
    3227         768 :       if (jfirstxy == 1) then
    3228          12 :          call par_xsum(grid, t3xy(:,1,:), km, xsum)
    3229         852 :          do k=1, km
    3230       21012 :             do i = ifirstxy, ilastxy
    3231       21000 :                t3xy(i,1,k) = xsum(k) / real(im,r8)
    3232             :             end do
    3233             :          end do
    3234             :       end if
    3235         768 :       if (jlastxy == jm) then
    3236          12 :          call par_xsum(grid, t3xy(:,jm,:), km, xsum)
    3237         852 :          do k = 1, km
    3238       21012 :             do i = ifirstxy, ilastxy
    3239       21000 :                t3xy(i,jm,k) = xsum(k) / real(im,r8)
    3240             :             end do
    3241             :          end do
    3242             :       end if
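                      : 
                      :       ! [Editor's note] At each pole all longitude indices address the same
                      :       ! physical point, so the field is made single-valued by replacing it
                      :       ! with its zonal mean.  par_xsum returns the zonal sum over the
                      :       ! distributed longitude dimension; on a single task owning all
                      :       ! longitudes the equivalent of the block above would simply be:
                      :       !
                      :       !    do k = 1, km
                      :       !       t3xy(:,1,k) = sum(t3xy(:,1,k)) / real(im,r8)
                      :       !    end do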
    3243             : 
    3244             :    case ('CONSTS')
    3245             : 
    3246      167424 :       if (.not. present(m_cnst)) then
    3247             :          call endrun(sub//': ERROR:  m_cnst needs to be present in the'// &
    3248           0 :                      ' argument list')
    3249             :       end if
    3250             : 
    3251      167424 :       tracer => dyn_in%tracer
    3252             : 
    3253      167424 :       if (readvar) then
    3254             : 
    3255      136704 :          if (.not. present(fh_ini)) then
    3256             :             call endrun(sub//': ERROR:  fh_ini needs to be present in the'// &
    3257           0 :                         ' argument list')
    3258             :          end if
    3259             : 
    3260             :          ! Check that all tracer units are in mass mixing ratios
    3261      136704 :          ret = pio_inq_varid(fh_ini, cnst_name(m_cnst), varid)
    3262      136704 :          ret = pio_get_att (fh_ini, varid, 'units', trunits)
    3263      136704 :          if (trunits(1:5) .ne. 'KG/KG' .and. trunits(1:5) .ne. 'kg/kg') then
    3264             :             call endrun(sub//': ERROR:  Units for tracer ' &
    3265           0 :                   //trim(cnst_name(m_cnst))//' must be in KG/KG')
    3266             :          end if
    3267             : 
    3268             :          ! Check for mixing_ratio attribute.  If present then use it to
    3269             :          ! specify whether the initial file contains wet or dry values.  If
    3270             :          ! not present then assume the mixing ratio is wet.  This is for
    3271             :          ! backwards compatibility with old initial files that were written
     3272             :          ! with all wet mixing ratios.
    3273             : 
    3274             :          ! We will handle errors for this routine
    3275      136704 :          call pio_seterrorhandling(fh_ini, pio_bcast_error, err_handling)
    3276             : 
    3277      136704 :          ret = pio_get_att(fh_ini, varid, 'mixing_ratio', mixing_ratio)
    3278      136704 :          if (ret == pio_noerr) then
    3279      136704 :             initial_mr(m_cnst) = mixing_ratio
    3280             :          else
    3281           0 :             initial_mr(m_cnst) = 'wet'
    3282             :          end if
    3283             : 
    3284             :          ! reset PIO to handle errors as before
    3285      136704 :          call pio_seterrorhandling(fh_ini, err_handling)
    3286             : 
    3287             : 
    3288       30720 :       else if (.not. analytic_ic_active()) then
    3289             : 
    3290             :          ! Constituents not read from initial file are initialized by the
    3291             :          ! package that implements them.  Note that the analytic IC code calls
    3292             :          ! cnst_init_default internally.
    3293             : 
    3294       30720 :          if (iam >= npes_xy) return
    3295             : 
    3296       30720 :          if (m_cnst == 1 .and. moist_physics) then
    3297           0 :             call endrun(sub//': ERROR:  Q must be on Initial File')
    3298             :          end if
    3299             : 
    3300       30720 :          call cnst_init_default(m_cnst, clat(jfirstxy:jlastxy), clon(ifirstxy:ilastxy,1), tracer(:,:,:,m_cnst))
    3301             :       end if
    3302             : 
    3303      167424 :       if (.not. analytic_ic_active()) then
    3304    11887104 :          do k = 1, km
    3305    47046144 :             do j = jfirstxy, jlastxy
    3306   890695680 :                do i = ifirstxy, ilastxy
    3307   878976000 :                   tracer(i,j,k,m_cnst) = max(tracer(i,j,k,m_cnst), qmin(m_cnst))
    3308             :                end do
    3309             :             end do
    3310             :          end do
    3311             :       end if
    3312             : 
    3313      167424 :       if (iam >= npes_xy) return
    3314             : 
    3315             :       ! Compute polar average
    3316      167424 :       if (jfirstxy == 1) then
    3317     9158616 :          call par_xsum(grid, dyn_in%tracer(:,1,:,m_cnst), km, xsum)
    3318      185736 :          do k = 1, km
    3319     4580616 :             do i = ifirstxy, ilastxy
    3320     4578000 :                dyn_in%tracer(i,1,k,m_cnst) = xsum(k) / real(im,r8)
    3321             :             end do
    3322             :          end do
    3323             :       end if
    3324      167424 :       if (jlastxy == jm) then
    3325     9158616 :          call par_xsum(grid, dyn_in%tracer(:,jm,:,m_cnst), km, xsum)
    3326      185736 :          do k = 1, km
    3327     4580616 :             do i = ifirstxy, ilastxy
    3328     4578000 :                dyn_in%tracer(i,jm,k,m_cnst) = xsum(k) / real(im,r8)
    3329             :             end do
    3330             :          end do
    3331             :       end if
    3332             : 
    3333             :    case ('PS')
    3334             : 
    3335             :       ! Average PS at the poles.
    3336         768 :       if (jfirstxy == 1) then
    3337          12 :          if (size(psxy,2) > 0) then
    3338          12 :             call par_xsum(grid, psxy(:,1:1), 1, xsum(1:1))
    3339         300 :             do i = ifirstxy, ilastxy
    3340         300 :                psxy(i,1) = xsum(1) / real(im,r8)
    3341             :             end do
    3342             :          end if
    3343             :       end if
    3344         768 :       if (jlastxy == jm) then
    3345          12 :          call par_xsum(grid, psxy(:,jm:jm), 1, xsum(1:1))
    3346         300 :          do i = ifirstxy, ilastxy
    3347         300 :             psxy(i,jm) = xsum(1) / real(im,r8)
    3348             :          end do
    3349             :       end if
    3350             : 
    3351             :    case ('PHIS')
    3352             : 
    3353             :       ! Average PHIS at the poles.
    3354        1536 :       if (jfirstxy == 1) then
    3355          24 :          if (size(phisxy,2) > 0) then
    3356          24 :             call par_xsum(grid, phisxy(:,1:1), 1, xsum(1:1))
    3357         600 :             do i = ifirstxy, ilastxy
    3358         600 :                phisxy(i,1) = xsum(1) / real(im,r8)
    3359             :             end do
    3360             :          end if
    3361             :       end if
    3362        1536 :       if (jlastxy == jm) then
    3363          24 :          call par_xsum(grid, phisxy(:,jm:jm), 1, xsum(1:1))
    3364         600 :          do i = ifirstxy, ilastxy
    3365         600 :             phisxy(i,jm) = xsum(1) / real(im,r8)
    3366             :          end do
    3367             :       end if
    3368             : 
    3369             :    end select
    3370             : 
    3371      340992 : end subroutine process_inidat
    3372             : 
    3373             : !=========================================================================================
    3374             : 
    3375      167424 : logical function dyn_field_exists(fh, fieldname, required)
    3376             : 
    3377             :    use pio,            only: var_desc_t, PIO_inq_varid
    3378             :    use pio,            only: PIO_NOERR
    3379             : 
    3380             :    type(file_desc_t), intent(inout) :: fh ! needs to be inout because of pio_seterrorhandling
    3381             :    character(len=*),  intent(in) :: fieldname
    3382             :    logical, optional, intent(in) :: required
    3383             : 
    3384             :    ! Local variables
    3385             :    logical                  :: found
    3386             :    logical                  :: field_required
    3387             :    integer                  :: ret
    3388             :    integer                  :: pio_errtype
    3389             :    type(var_desc_t)         :: varid
    3390             :    character(len=128)       :: errormsg
    3391             :    !--------------------------------------------------------------------------
    3392             : 
    3393      167424 :    if (present(required)) then
    3394      167424 :       field_required = required
    3395             :    else
    3396             :       field_required = .true.
    3397             :    end if
    3398             : 
    3399             :    ! Set PIO to return error codes when reading data from IC file.
    3400      167424 :    call pio_seterrorhandling(fh, pio_bcast_error, oldmethod=pio_errtype)
    3401             : 
    3402      167424 :    ret = PIO_inq_varid(fh, trim(fieldname), varid)
    3403      167424 :    found = (ret == PIO_NOERR)
    3404      167424 :    if (.not. found) then
    3405       30720 :       if (field_required) then
    3406           0 :          write(errormsg, *) trim(fieldname),' was not present in the input file.'
    3407           0 :          call endrun('DYN_FIELD_EXISTS: '//errormsg)
    3408             :       end if
    3409             :    end if
    3410             : 
    3411      167424 :    dyn_field_exists = found
    3412             : 
    3413             :    ! Put the error handling back the way it was
    3414      167424 :    call pio_seterrorhandling(fh, pio_errtype)
    3415             : 
    3416      167424 : end function dyn_field_exists
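                      : 
                      : ! [Editor's note] A typical call, with a hypothetical field name, reads a
                      : ! variable only when present and treats its absence as benign:
                      : !
                      : !    if (dyn_field_exists(fh_ini, 'CLDLIQ', required=.false.)) then
                      : !       call infld('CLDLIQ', fh_ini, 'lon', 'lat', 'lev', ifirstxy, ilastxy, &
                      : !                  jfirstxy, jlastxy, 1, km, tracer, readvar, gridname='fv_centers')
                      : !    end if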
    3417             : 
    3418             : !=========================================================================================
    3419             : 
    3420           0 : end module dyn_comp

Generated by: LCOV version 1.14