From b9d1248eacee4575b6bfc92cae50661ef18f96f9 Mon Sep 17 00:00:00 2001 From: "Rusty.Benson" Date: Thu, 14 Apr 2022 09:33:55 -0400 Subject: [PATCH 1/7] fixes io performance issues by making everyone a reader when io_layout=1,1 adds capability to use FMS feature to ignore data integrity checksums in restarts --- driver/fvGFS/atmosphere.F90 | 7 +++-- model/fv_arrays.F90 | 2 ++ model/fv_control.F90 | 9 ++++-- tools/external_ic.F90 | 59 ++++++++++++++++--------------------- tools/fv_io.F90 | 28 +++++++++--------- tools/fv_mp_mod.F90 | 18 +++++++++-- tools/fv_restart.F90 | 6 ++-- 7 files changed, 70 insertions(+), 59 deletions(-) diff --git a/driver/fvGFS/atmosphere.F90 b/driver/fvGFS/atmosphere.F90 index 7af45f664..f2f68eb6e 100644 --- a/driver/fvGFS/atmosphere.F90 +++ b/driver/fvGFS/atmosphere.F90 @@ -357,7 +357,7 @@ subroutine atmosphere_init (Time_init, Time, Time_step, Grid_box, area) #ifdef MOVING_NEST call fv_tracker_init(size(Atm)) if (mygrid .eq. 2) call allocate_tracker(mygrid, Atm(mygrid)%bd%isc, Atm(mygrid)%bd%iec, Atm(mygrid)%bd%jsc, Atm(mygrid)%bd%jec) -#endif +#endif Atm(mygrid)%Time_init = Time_init @@ -953,10 +953,10 @@ end subroutine get_nth_domain_info !! the "domain2d" variable associated with the coupling grid and the !! decomposition for the current cubed-sphere tile. !>@detail Coupling is done using the mass/temperature grid with no halos. 
- subroutine atmosphere_domain ( fv_domain, layout, regional, nested, & + subroutine atmosphere_domain ( fv_domain, rd_domain, layout, regional, nested, & moving_nest_parent, is_moving_nest, & ngrids_atmos, mygrid_atmos, pelist ) - type(domain2d), intent(out) :: fv_domain + type(domain2d), intent(out) :: fv_domain, rd_domain integer, intent(out) :: layout(2) logical, intent(out) :: regional logical, intent(out) :: nested @@ -969,6 +969,7 @@ subroutine atmosphere_domain ( fv_domain, layout, regional, nested, & integer :: n fv_domain = Atm(mygrid)%domain_for_coupler + rd_domain = Atm(mygrid)%domain_for_read layout(1:2) = Atm(mygrid)%layout(1:2) regional = Atm(mygrid)%flagstruct%regional nested = ngrids > 1 diff --git a/model/fv_arrays.F90 b/model/fv_arrays.F90 index b89dcefa1..6edc03d18 100644 --- a/model/fv_arrays.F90 +++ b/model/fv_arrays.F90 @@ -516,6 +516,7 @@ module fv_arrays_mod !----------------------------------------------------------------------------------------------- logical :: reset_eta = .false. + logical :: enforce_rst_cksum = .true. !< enfore or override data integrity restart checksums real :: p_fac = 0.05 !< Safety factor for minimum nonhydrostatic pressures, which !< will be limited so the full pressure is no less than p_fac !< times the hydrostatic pressure. This is only of concern in mid-top @@ -1303,6 +1304,7 @@ module fv_arrays_mod #if defined(SPMD) type(domain2D) :: domain_for_coupler !< domain used in coupled model with halo = 1. 
+ type(domain2D) :: domain_for_read !< domain used for reads to increase performance when io_layout=(1,1) !global tile and tile_of_mosaic only have a meaning for the CURRENT pe integer :: num_contact, npes_per_tile, global_tile, tile_of_mosaic, npes_this_grid diff --git a/model/fv_control.F90 b/model/fv_control.F90 index 5b24ccf56..35584b0d5 100644 --- a/model/fv_control.F90 +++ b/model/fv_control.F90 @@ -198,7 +198,7 @@ subroutine fv_control_init(Atm, dt_atmos, this_grid, grids_on_this_pe, p_split, real, intent(in) :: dt_atmos integer, intent(OUT) :: this_grid logical, allocatable, intent(OUT) :: grids_on_this_pe(:) - character(len=32), optional, intent(in) :: nml_filename_in ! alternate nml + character(len=32), optional, intent(in) :: nml_filename_in ! alternate nml logical, optional, intent(in) :: skip_nml_read_in ! use previously loaded nml integer, intent(INOUT) :: p_split @@ -291,6 +291,7 @@ subroutine fv_control_init(Atm, dt_atmos, this_grid, grids_on_this_pe, p_split, real(kind=R_GRID) , pointer :: target_lon logical , pointer :: reset_eta + logical , pointer :: enforce_rst_cksum real , pointer :: p_fac real , pointer :: a_imp integer , pointer :: n_split @@ -676,7 +677,8 @@ subroutine fv_control_init(Atm, dt_atmos, this_grid, grids_on_this_pe, p_split, Atm(this_grid)%flagstruct%grid_type,Atm(this_grid)%neststruct%nested, & Atm(this_grid)%layout,Atm(this_grid)%io_layout,Atm(this_grid)%bd,Atm(this_grid)%tile_of_mosaic, & Atm(this_grid)%gridstruct%square_domain,Atm(this_grid)%npes_per_tile,Atm(this_grid)%domain, & - Atm(this_grid)%domain_for_coupler,Atm(this_grid)%num_contact,Atm(this_grid)%pelist) + Atm(this_grid)%domain_for_coupler,Atm(this_grid)%domain_for_read,Atm(this_grid)%num_contact, & + Atm(this_grid)%pelist) call broadcast_domains(Atm,Atm(this_grid)%pelist,size(Atm(this_grid)%pelist)) do n=1,ngrids tile_id = mpp_get_tile_id(Atm(n)%domain) @@ -857,6 +859,7 @@ subroutine set_namelist_pointers(Atm) regional_bcs_from_gsi => 
Atm%flagstruct%regional_bcs_from_gsi write_restart_with_bcs => Atm%flagstruct%write_restart_with_bcs reset_eta => Atm%flagstruct%reset_eta + enforce_rst_cksum => Atm%flagstruct%enforce_rst_cksum p_fac => Atm%flagstruct%p_fac a_imp => Atm%flagstruct%a_imp n_split => Atm%flagstruct%n_split @@ -1075,7 +1078,7 @@ subroutine read_namelist_fv_core_nml(Atm) write_coarse_restart_files,& write_coarse_diagnostics,& write_only_coarse_intermediate_restarts, & - write_coarse_agrid_vel_rst, write_coarse_dgrid_vel_rst + write_coarse_agrid_vel_rst, write_coarse_dgrid_vel_rst, enforce_rst_cksum ! Read FVCORE namelist diff --git a/tools/external_ic.F90 b/tools/external_ic.F90 index 56cd554f7..cec8bd1f3 100644 --- a/tools/external_ic.F90 +++ b/tools/external_ic.F90 @@ -206,10 +206,9 @@ module external_ic_mod contains - subroutine get_external_ic( Atm, fv_domain, cold_start ) + subroutine get_external_ic( Atm, cold_start ) type(fv_atmos_type), intent(inout), target :: Atm - type(domain2d), intent(inout) :: fv_domain logical, intent(IN) :: cold_start real:: alpha = 0. real rdg @@ -260,14 +259,14 @@ subroutine get_external_ic( Atm, fv_domain, cold_start ) enddo enddo - call mpp_update_domains( f0, fv_domain ) + call mpp_update_domains( f0, Atm%domain ) if ( Atm%gridstruct%cubed_sphere .and. (.not. Atm%gridstruct%bounded_domain))then call fill_corners(f0, Atm%npx, Atm%npy, YDir) endif ! Read in cubed_sphere terrain if ( Atm%flagstruct%mountain ) then - call get_cubed_sphere_terrain(Atm, fv_domain) + call get_cubed_sphere_terrain(Atm) else if (.not. Atm%neststruct%nested) Atm%phis = 0. !TODO: Not sure about this line --- lmh 30 may 18 endif @@ -276,32 +275,32 @@ subroutine get_external_ic( Atm, fv_domain, cold_start ) if ( Atm%flagstruct%ncep_ic ) then nq = 1 call timing_on('NCEP_IC') - call get_ncep_ic( Atm, fv_domain, nq ) + call get_ncep_ic( Atm, nq ) call timing_off('NCEP_IC') #ifdef FV_TRACERS if (.not. 
cold_start) then - call fv_io_read_tracers( fv_domain, Atm ) + call fv_io_read_tracers( Atm ) if(is_master()) write(*,*) 'All tracers except sphum replaced by FV IC' endif #endif elseif ( Atm%flagstruct%nggps_ic ) then call timing_on('NGGPS_IC') - call get_nggps_ic( Atm, fv_domain ) + call get_nggps_ic( Atm ) call timing_off('NGGPS_IC') elseif ( Atm%flagstruct%hrrrv3_ic ) then call timing_on('HRRR_IC') - call get_hrrr_ic( Atm, fv_domain ) + call get_hrrr_ic( Atm ) call timing_off('HRRR_IC') elseif ( Atm%flagstruct%ecmwf_ic ) then if( is_master() ) write(*,*) 'Calling get_ecmwf_ic' call timing_on('ECMWF_IC') - call get_ecmwf_ic( Atm, fv_domain ) + call get_ecmwf_ic( Atm ) call timing_off('ECMWF_IC') else ! The following is to read in legacy lat-lon FV core restart file ! is Atm%q defined in all cases? nq = size(Atm%q,4) - call get_fv_ic( Atm, fv_domain, nq ) + call get_fv_ic( Atm, nq ) endif call prt_maxmin('PS', Atm%ps, is, ie, js, je, ng, 1, 0.01) @@ -368,9 +367,8 @@ end subroutine get_external_ic !------------------------------------------------------------------ - subroutine get_cubed_sphere_terrain( Atm, fv_domain ) + subroutine get_cubed_sphere_terrain( Atm ) type(fv_atmos_type), intent(inout), target :: Atm - type(domain2d), intent(inout) :: fv_domain type(FmsNetcdfDomainFile_t) :: Fv_core integer :: tile_id(1) character(len=64) :: fname @@ -393,13 +391,13 @@ subroutine get_cubed_sphere_terrain( Atm, fv_domain ) jed = Atm%bd%jed ng = Atm%bd%ng - tile_id = mpp_get_tile_id( fv_domain ) + tile_id = mpp_get_tile_id( Atm%domain ) fname = 'INPUT/fv_core.res.nc' call mpp_error(NOTE, 'external_ic: looking for '//fname) - if( open_file(Fv_core, fname, "read", fv_domain, is_restart=.true.) ) then + if( open_file(Fv_core, fname, "read", Atm%domain_for_read, is_restart=.true.) 
) then call read_data(Fv_core, 'phis', Atm%phis(is:ie,js:je)) call close_file(Fv_core) else @@ -428,7 +426,7 @@ end subroutine get_cubed_sphere_terrain !>@brief The subroutine 'get_nggps_ic' reads in data after it has been preprocessed with !! NCEP/EMC orography maker and 'global_chgres', and has been horiztontally !! interpolated to the current cubed-sphere grid - subroutine get_nggps_ic (Atm, fv_domain) + subroutine get_nggps_ic (Atm) !>variables read in from 'gfs_ctrl.nc' !> VCOORD - level information @@ -456,7 +454,6 @@ subroutine get_nggps_ic (Atm, fv_domain) #endif type(fv_atmos_type), intent(inout) :: Atm - type(domain2d), intent(inout) :: fv_domain ! local: real, dimension(:), allocatable:: ak, bk real, dimension(:,:), allocatable:: wk2, ps, oro_g @@ -585,7 +582,7 @@ subroutine get_nggps_ic (Atm, fv_domain) !--- read in surface temperature (k) and land-frac ! surface skin temperature - if( open_file(SFC_restart, fn_sfc_ics, "read", Atm%domain, is_restart=.true., dont_add_res_to_filename=.true.) ) then + if( open_file(SFC_restart, fn_sfc_ics, "read", Atm%domain_for_read, is_restart=.true., dont_add_res_to_filename=.true.) ) then naxis_dims = get_variable_num_dimensions(SFC_restart, 'tsea') allocate (dim_names_alloc(naxis_dims)) call get_variable_dimension_names(SFC_restart, 'tsea', dim_names_alloc) @@ -605,7 +602,7 @@ subroutine get_nggps_ic (Atm, fv_domain) dim_names_2d(2) = "lon" ! terrain surface height -- (needs to be transformed into phis = zs*grav) - if( open_file(ORO_restart, fn_oro_ics, "read", Atm%domain, is_restart=.true., dont_add_res_to_filename=.true.) ) then + if( open_file(ORO_restart, fn_oro_ics, "read", Atm%domain_for_read, is_restart=.true., dont_add_res_to_filename=.true.) ) then call register_axis(ORO_restart, "lat", "y") call register_axis(ORO_restart, "lon", "x") if (filtered_terrain) then @@ -915,7 +912,7 @@ subroutine read_gfs_ic() dim_names_3d4(1) = "levp" ! 
surface pressure (Pa) - if( open_file(GFS_restart, fn_gfs_ics, "read", Atm%domain, is_restart=.true., dont_add_res_to_filename=.true.) ) then + if( open_file(GFS_restart, fn_gfs_ics, "read", Atm%domain_for_read, is_restart=.true., dont_add_res_to_filename=.true.) ) then call register_axis(GFS_restart, "lat", "y") call register_axis(GFS_restart, "lon", "x") call register_axis(GFS_restart, "lonp", "x", domain_position=east) @@ -945,7 +942,7 @@ subroutine read_gfs_ic() do nt = 1, ntracers q(:,:,:,nt) = -999.99 - + call get_tracer_names(MODEL_ATMOS, nt, tracer_name) call register_restart_field(GFS_restart, trim(tracer_name), q(:,:,:,nt), dim_names_3d3, is_optional=.true.) enddo @@ -964,7 +961,7 @@ subroutine read_gfs_ic() end subroutine get_nggps_ic !------------------------------------------------------------------ !------------------------------------------------------------------ - subroutine get_hrrr_ic (Atm, fv_domain) + subroutine get_hrrr_ic (Atm) ! read in data after it has been preprocessed with ! NCEP/EMC orography maker ! @@ -990,7 +987,6 @@ subroutine get_hrrr_ic (Atm, fv_domain) type(fv_atmos_type), intent(inout) :: Atm - type(domain2d), intent(inout) :: fv_domain ! local: real, dimension(:), allocatable:: ak, bk real, dimension(:,:), allocatable:: wk2, ps, oro_g @@ -1104,7 +1100,7 @@ subroutine get_hrrr_ic (Atm, fv_domain) !--- read in surface temperature (k) and land-frac ! surface skin temperature - if( open_file(SFC_restart, fn_sfc_ics, "read", Atm%domain, is_restart=.true., dont_add_res_to_filename=.true.) ) then + if( open_file(SFC_restart, fn_sfc_ics, "read", Atm%domain_for_read, is_restart=.true., dont_add_res_to_filename=.true.) ) then call get_variable_dimension_names(SFC_restart, 'tsea', dim_names_2d) call register_axis(SFC_restart, dim_names_2d(2), "y") call register_axis(SFC_restart, dim_names_2d(1), "x") @@ -1121,7 +1117,7 @@ subroutine get_hrrr_ic (Atm, fv_domain) dim_names_2d(2) = "lon" ! 
terrain surface height -- (needs to be transformed into phis = zs*grav) - if( open_file(ORO_restart, fn_oro_ics, "read", Atm%domain, is_restart=.true., dont_add_res_to_filename=.true.) ) then + if( open_file(ORO_restart, fn_oro_ics, "read", Atm%domain_for_read, is_restart=.true., dont_add_res_to_filename=.true.) ) then call register_axis(ORO_restart, "lat", "y") call register_axis(ORO_restart, "lon", "x") if (filtered_terrain) then @@ -1164,7 +1160,7 @@ subroutine get_hrrr_ic (Atm, fv_domain) dim_names_3d4(1) = "levp" ! edge pressure (Pa) - if( open_file(HRRR_restart, fn_hrr_ics, "read", Atm%domain,is_restart=.true., dont_add_res_to_filename=.true.) ) then + if( open_file(HRRR_restart, fn_hrr_ics, "read", Atm%domain_for_read, is_restart=.true., dont_add_res_to_filename=.true.) ) then call register_axis(HRRR_restart, "lat", "y") call register_axis(HRRR_restart, "lon", "x") call register_axis(HRRR_restart, "lonp", "x", domain_position=east) @@ -1360,9 +1356,8 @@ end subroutine get_hrrr_ic !------------------------------------------------------------------ !------------------------------------------------------------------ !>@brief The subroutine 'get_ncep_ic' reads in the specified NCEP analysis or reanalysis dataset - subroutine get_ncep_ic( Atm, fv_domain, nq ) + subroutine get_ncep_ic( Atm, nq ) type(fv_atmos_type), intent(inout) :: Atm - type(domain2d), intent(inout) :: fv_domain integer, intent(in):: nq ! local: #ifdef HIWPP_ETA @@ -1818,7 +1813,7 @@ end subroutine get_ncep_ic !>@brief The subroutine 'get_ecmwf_ic' reads in initial conditions from ECMWF analyses !! 
(EXPERIMENTAL: contact Jan-Huey Chen jan-huey.chen@noaa.gov for support) !>@authors Jan-Huey Chen, Xi Chen, Shian-Jiann Lin - subroutine get_ecmwf_ic( Atm, fv_domain ) + subroutine get_ecmwf_ic( Atm ) #ifdef __PGI use GFS_restart, only : GFS_restart_type @@ -1827,7 +1822,6 @@ subroutine get_ecmwf_ic( Atm, fv_domain ) #endif type(fv_atmos_type), intent(inout) :: Atm - type(domain2d), intent(inout) :: fv_domain ! local: real :: ak_ec(138), bk_ec(138) data ak_ec/ 0.000000, 2.000365, 3.102241, 4.666084, 6.827977, 9.746966, & @@ -2062,7 +2056,7 @@ subroutine get_ecmwf_ic( Atm, fv_domain ) dim_names_3d4(1) = "levp" !! Read in model terrain from oro_data.tile?.nc - if( open_file(ORO_restart, fn_oro_ics, "read", Atm%domain, is_restart=.true., dont_add_res_to_filename=.true.) ) then + if( open_file(ORO_restart, fn_oro_ics, "read", Atm%domain_for_read, is_restart=.true., dont_add_res_to_filename=.true.) ) then call register_axis(ORO_restart, "lat", "y") call register_axis(ORO_restart, "lon", "x") if (filtered_terrain) then @@ -2088,7 +2082,7 @@ subroutine get_ecmwf_ic( Atm, fv_domain ) allocate (ps_gfs(is:ie,js:je)) allocate (zh_gfs(is:ie,js:je,levp_gfs+1)) - if( open_file(GFS_restart, fn_gfs_ics, "read", Atm%domain, is_restart=.true., dont_add_res_to_filename=.true.) ) then + if( open_file(GFS_restart, fn_gfs_ics, "read", Atm%domain_for_read, is_restart=.true., dont_add_res_to_filename=.true.) 
) then call register_axis(GFS_restart, "lat", "y") call register_axis(GFS_restart, "lon", "x") call register_axis(GFS_restart, "levp", size(zh_gfs,3)) @@ -2622,9 +2616,8 @@ subroutine get_ecmwf_ic( Atm, fv_domain ) end subroutine get_ecmwf_ic !------------------------------------------------------------------ !------------------------------------------------------------------ - subroutine get_fv_ic( Atm, fv_domain, nq ) + subroutine get_fv_ic( Atm, nq ) type(fv_atmos_type), intent(inout) :: Atm - type(domain2d), intent(inout) :: fv_domain integer, intent(in):: nq type(FmsNetcdfFile_t) :: Latlon_dyn, Latlon_tra diff --git a/tools/fv_io.F90 b/tools/fv_io.F90 index e892b6811..b1547824e 100644 --- a/tools/fv_io.F90 +++ b/tools/fv_io.F90 @@ -419,7 +419,7 @@ subroutine fv_io_read_restart(fv_domain,Atm) Atm(1)%Fv_restart_tile_is_open = open_file(Atm(1)%Fv_restart_tile, fname, "read", fv_domain, is_restart=.true.) if (Atm(1)%Fv_restart_tile_is_open) then call fv_io_register_restart(Atm(1)) - call read_restart(Atm(1)%Fv_restart_tile) + call read_restart(Atm(1)%Fv_restart_tile, ignore_checksum=Atm(1)%flagstruct%enforce_rst_cksum) call close_file(Atm(1)%Fv_restart_tile) Atm(1)%Fv_restart_tile_is_open = .false. endif @@ -429,7 +429,7 @@ subroutine fv_io_read_restart(fv_domain,Atm) Atm(1)%Tra_restart_is_open = open_file(Atm(1)%Tra_restart, fname, "read", fv_domain, is_restart=.true.) if (Atm(1)%Tra_restart_is_open) then call fv_io_register_restart(Atm(1)) - call read_restart(Atm(1)%Tra_restart) + call read_restart(Atm(1)%Tra_restart, ignore_checksum=Atm(1)%flagstruct%enforce_rst_cksum) call close_file(Atm(1)%Tra_restart) Atm(1)%Tra_restart_is_open = .false. else @@ -442,7 +442,7 @@ subroutine fv_io_read_restart(fv_domain,Atm) if (Atm(1)%Rsf_restart_is_open) then Atm(1)%flagstruct%srf_init = .true. 
call fv_io_register_restart(Atm(1)) - call read_restart(Atm(1)%Rsf_restart) + call read_restart(Atm(1)%Rsf_restart, ignore_checksum=Atm(1)%flagstruct%enforce_rst_cksum) call close_file(Atm(1)%Rsf_restart) Atm(1)%Rsf_restart_is_open = .false. else @@ -456,7 +456,7 @@ subroutine fv_io_read_restart(fv_domain,Atm) Atm(1)%Mg_restart_is_open = open_file(Atm(1)%Mg_restart, fname, "read", fv_domain, is_restart=.true.) if (Atm(1)%Mg_restart_is_open) then call fv_io_register_restart(Atm(1)) - call read_restart(Atm(1)%Mg_restart) + call read_restart(Atm(1)%Mg_restart, ignore_checksum=Atm(1)%flagstruct%enforce_rst_cksum) call close_file(Atm(1)%Mg_restart) Atm(1)%Mg_restart_is_open = .false. else @@ -467,7 +467,7 @@ subroutine fv_io_read_restart(fv_domain,Atm) Atm(1)%Lnd_restart_is_open = open_file(Atm(1)%Lnd_restart, fname, "read", fv_domain, is_restart=.true.) if (Atm(1)%Lnd_restart_is_open) then call fv_io_register_restart(Atm(1)) - call read_restart(Atm(1)%Lnd_restart) + call read_restart(Atm(1)%Lnd_restart, ignore_checksum=Atm(1)%flagstruct%enforce_rst_cksum) call close_file(Atm(1)%Lnd_restart) Atm(1)%Lnd_restart_is_open = .false. else @@ -483,8 +483,7 @@ end subroutine fv_io_read_restart !>@details This subroutine is useful when initializing a cycled or nudged model !! from an analysis that does not have a whole set of microphysical, aerosol, or !! chemical tracers - subroutine fv_io_read_tracers(fv_domain,Atm) - type(domain2d), intent(inout) :: fv_domain + subroutine fv_io_read_tracers(Atm) type(fv_atmos_type), intent(inout) :: Atm(:) integer :: ntracers, ntprog, nt, isc, iec, jsc, jec character(len=6) :: stile_name @@ -499,7 +498,7 @@ subroutine fv_io_read_tracers(fv_domain,Atm) call get_number_tracers(MODEL_ATMOS, num_tracers=ntracers, num_prog=ntprog) ! fix for single tile runs where you need fv_core.res.nc and fv_core.res.tile1.nc - ntiles = mpp_get_ntile_count(fv_domain) + ntiles = mpp_get_ntile_count(Atm(1)%domain_for_read) if(ntiles == 1 .and. .not. 
Atm(1)%neststruct%nested) then stile_name = '.tile1' else @@ -508,7 +507,7 @@ subroutine fv_io_read_tracers(fv_domain,Atm) fname = 'INPUT/fv_tracer.res'//trim(stile_name)//'.nc' - if (open_file(Tra_restart_r,fname,"read",fv_domain, is_restart=.true.)) then + if (open_file(Tra_restart_r,fname,"read",Atm(1)%domain_for_read, is_restart=.true.)) then do nt = 2, ntprog call get_tracer_names(MODEL_ATMOS, nt, tracer_name) call set_tracer_profile (MODEL_ATMOS, nt, Atm(1)%q(isc:iec,jsc:jec,:,nt) ) @@ -534,10 +533,9 @@ end subroutine fv_io_read_tracers !>@brief The subroutine 'remap_restart' remaps the model state from remap files !! to a new set of Eulerian coordinates. !>@details Use if npz (run time z-dimension) /= npz_rst (restart z-dimension) - subroutine remap_restart(fv_domain,Atm) + subroutine remap_restart(Atm) use fv_mapz_mod, only: rst_remap - type(domain2d), intent(inout) :: fv_domain type(fv_atmos_type), intent(inout) :: Atm(:) character(len=64) :: fname, tracer_name @@ -546,6 +544,7 @@ subroutine remap_restart(fv_domain,Atm) integer :: isd, ied, jsd, jed integer :: ntiles + type(domain2d) :: fv_domain type(FmsNetcdfDomainFile_t) :: FV_tile_restart_r, Tra_restart_r type(FmsNetcdfFile_t) :: Fv_restart_r integer, allocatable, dimension(:) :: pes !< Array of the pes in the current pelist @@ -560,6 +559,7 @@ subroutine remap_restart(fv_domain,Atm) integer npz, npz_rst, ng integer i,j,k + fv_domain = Atm(1)%domain_for_read npz = Atm(1)%npz ! run time z dimension npz_rst = Atm(1)%flagstruct%npz_rst ! restart z dimension isc = Atm(1)%bd%isc; iec = Atm(1)%bd%iec; jsc = Atm(1)%bd%jsc; jec = Atm(1)%bd%jec @@ -640,7 +640,7 @@ subroutine remap_restart(fv_domain,Atm) if (Atm(1)%Rsf_restart_is_open) then Atm%flagstruct%srf_init = .true. call fv_io_register_restart(Atm(1)) - call read_restart(Atm(1)%Rsf_restart) + call read_restart(Atm(1)%Rsf_restart, ignore_checksum=Atm(1)%flagstruct%enforce_rst_cksum) call close_file(Atm(1)%Rsf_restart) Atm(1)%Rsf_restart_is_open = .false. 
call mpp_error(NOTE,'==> Warning from remap_restart: Expected file '//trim(fname)//' does not exist') @@ -1279,12 +1279,12 @@ subroutine fv_io_read_BCs(Atm) call fv_io_register_restart_BCs(Atm) if (Atm%neststruct%BCfile_sw_is_open) then - call read_restart_bc(Atm%neststruct%BCfile_sw) + call read_restart_bc(Atm%neststruct%BCfile_sw, ignore_checksum=Atm%flagstruct%enforce_rst_cksum) call close_file(Atm%neststruct%BCfile_sw) endif if (Atm%neststruct%BCfile_ne_is_open) then - call read_restart_bc(Atm%neststruct%BCfile_ne) + call read_restart_bc(Atm%neststruct%BCfile_ne, ignore_checksum=Atm%flagstruct%enforce_rst_cksum) call close_file(Atm%neststruct%BCfile_ne) endif diff --git a/tools/fv_mp_mod.F90 b/tools/fv_mp_mod.F90 index b6f279e31..4f2c0f2c2 100644 --- a/tools/fv_mp_mod.F90 +++ b/tools/fv_mp_mod.F90 @@ -91,7 +91,7 @@ module fv_mp_mod use mpp_domains_mod, only : mpp_group_update_initialized, mpp_do_group_update use mpp_domains_mod, only : mpp_create_group_update,mpp_reset_group_update_field use mpp_domains_mod, only : group_halo_update_type => mpp_group_update_type - use mpp_domains_mod, only: nest_domain_type + use mpp_domains_mod, only : nest_domain_type, mpp_get_io_domain_layout, mpp_get_layout, mpp_copy_domain use mpp_parameter_mod, only : WUPDATE, EUPDATE, SUPDATE, NUPDATE, XUPDATE, YUPDATE use fv_arrays_mod, only: fv_atmos_type, fv_grid_bounds_type use mpp_mod, only : mpp_get_current_pelist, mpp_set_current_pelist @@ -367,7 +367,7 @@ end subroutine mp_stop ! vvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvv ! !>@brief The subroutine 'domain_decomp' sets up the domain decomposition. 
subroutine domain_decomp(grid_num,npx,npy,nregions,grid_type,nested,layout,io_layout,bd,tile,square_domain,& - npes_per_tile,domain,domain_for_coupler,num_contact,pelist) + npes_per_tile,domain,domain_for_coupler,domain_for_read,num_contact,pelist) integer, intent(IN) :: grid_num integer, intent(IN) :: npx,npy,grid_type @@ -389,8 +389,9 @@ subroutine domain_decomp(grid_num,npx,npy,nregions,grid_type,nested,layout,io_la integer, intent(INOUT) :: pelist(:) integer, intent(OUT) :: num_contact, npes_per_tile logical, intent(OUT) :: square_domain - type(domain2D), intent(OUT) :: domain, domain_for_coupler + type(domain2D), intent(OUT) :: domain, domain_for_coupler, domain_for_read type(fv_grid_bounds_type), intent(INOUT) :: bd + integer :: l_layout(2) nx = npx-1 ny = npy-1 @@ -652,6 +653,17 @@ subroutine domain_decomp(grid_num,npx,npy,nregions,grid_type,nested,layout,io_la call mpp_define_io_domain(domain, io_layout) call mpp_define_io_domain(domain_for_coupler, io_layout) + !--- create a read domain that can be used to improve read performance + !--- if io_layout=(1,1) then read io_layout=layout (all read) + !--- if io_layout/=(1,1) then read io_layout=io_layout (no change) + l_layout = mpp_get_io_domain_layout(domain) + call mpp_copy_domain(domain, domain_for_read) + if (ALL(l_layout == 1)) then + call mpp_get_layout(domain, l_layout) + call mpp_define_io_domain(domain_for_read, l_layout) + else + call mpp_define_io_domain(domain_for_read, l_layout) + endif endif deallocate(pe_start,pe_end) diff --git a/tools/fv_restart.F90 b/tools/fv_restart.F90 index bcd6ea13b..313ef7355 100644 --- a/tools/fv_restart.F90 +++ b/tools/fv_restart.F90 @@ -343,7 +343,7 @@ subroutine fv_restart(fv_domain, Atm, seconds, days, cold_start, grid_type, this !3. External_ic if (Atm(n)%flagstruct%external_ic) then if( is_master() ) write(*,*) 'Calling get_external_ic' - call get_external_ic(Atm(n), Atm(n)%domain, .not. do_read_restart) + call get_external_ic(Atm(n), .not. 
do_read_restart) if( is_master() ) write(*,*) 'IC generated from the specified external source' !4. Restart @@ -358,11 +358,11 @@ subroutine fv_restart(fv_domain, Atm, seconds, days, cold_start, grid_type, this write(*,*) '***** End Note from FV core **************************' write(*,*) ' ' endif - call remap_restart( Atm(n)%domain, Atm(n:n) ) + call remap_restart( Atm(n:n) ) if( is_master() ) write(*,*) 'Done remapping dynamical IC' else if( is_master() ) write(*,*) 'Warm starting, calling fv_io_restart' - call fv_io_read_restart(Atm(n)%domain,Atm(n:n)) + call fv_io_read_restart(Atm(n)%domain_for_read,Atm(n:n)) !====== PJP added DA functionality ====== if (Atm(n)%flagstruct%read_increment) then ! print point in middle of domain for a sanity check From b67bc5d154da2911d20b7a07716ec645eb2a1f0e Mon Sep 17 00:00:00 2001 From: "Rusty.Benson" Date: Thu, 14 Apr 2022 16:33:29 -0400 Subject: [PATCH 2/7] rename enforce_rst_cksum to ignore_rst_cksum and change the default value for compatibility --- model/fv_arrays.F90 | 2 +- model/fv_control.F90 | 6 +++--- tools/fv_io.F90 | 16 ++++++++-------- 3 files changed, 12 insertions(+), 12 deletions(-) diff --git a/model/fv_arrays.F90 b/model/fv_arrays.F90 index 6edc03d18..4a0539509 100644 --- a/model/fv_arrays.F90 +++ b/model/fv_arrays.F90 @@ -516,7 +516,7 @@ module fv_arrays_mod !----------------------------------------------------------------------------------------------- logical :: reset_eta = .false. - logical :: enforce_rst_cksum = .true. !< enfore or override data integrity restart checksums + logical :: ignore_rst_cksum = .false. !< enforce (.false.) or override (.true.) data integrity restart checksums real :: p_fac = 0.05 !< Safety factor for minimum nonhydrostatic pressures, which !< will be limited so the full pressure is no less than p_fac !< times the hydrostatic pressure. 
This is only of concern in mid-top diff --git a/model/fv_control.F90 b/model/fv_control.F90 index 35584b0d5..6951a15a5 100644 --- a/model/fv_control.F90 +++ b/model/fv_control.F90 @@ -291,7 +291,7 @@ subroutine fv_control_init(Atm, dt_atmos, this_grid, grids_on_this_pe, p_split, real(kind=R_GRID) , pointer :: target_lon logical , pointer :: reset_eta - logical , pointer :: enforce_rst_cksum + logical , pointer :: ignore_rst_cksum real , pointer :: p_fac real , pointer :: a_imp integer , pointer :: n_split @@ -859,7 +859,7 @@ subroutine set_namelist_pointers(Atm) regional_bcs_from_gsi => Atm%flagstruct%regional_bcs_from_gsi write_restart_with_bcs => Atm%flagstruct%write_restart_with_bcs reset_eta => Atm%flagstruct%reset_eta - enforce_rst_cksum => Atm%flagstruct%enforce_rst_cksum + ignore_rst_cksum => Atm%flagstruct%ignore_rst_cksum p_fac => Atm%flagstruct%p_fac a_imp => Atm%flagstruct%a_imp n_split => Atm%flagstruct%n_split @@ -1078,7 +1078,7 @@ subroutine read_namelist_fv_core_nml(Atm) write_coarse_restart_files,& write_coarse_diagnostics,& write_only_coarse_intermediate_restarts, & - write_coarse_agrid_vel_rst, write_coarse_dgrid_vel_rst, enforce_rst_cksum + write_coarse_agrid_vel_rst, write_coarse_dgrid_vel_rst, ignore_rst_cksum ! Read FVCORE namelist diff --git a/tools/fv_io.F90 b/tools/fv_io.F90 index b1547824e..fd5294b32 100644 --- a/tools/fv_io.F90 +++ b/tools/fv_io.F90 @@ -419,7 +419,7 @@ subroutine fv_io_read_restart(fv_domain,Atm) Atm(1)%Fv_restart_tile_is_open = open_file(Atm(1)%Fv_restart_tile, fname, "read", fv_domain, is_restart=.true.) if (Atm(1)%Fv_restart_tile_is_open) then call fv_io_register_restart(Atm(1)) - call read_restart(Atm(1)%Fv_restart_tile, ignore_checksum=Atm(1)%flagstruct%enforce_rst_cksum) + call read_restart(Atm(1)%Fv_restart_tile, ignore_checksum=Atm(1)%flagstruct%ignore_rst_cksum) call close_file(Atm(1)%Fv_restart_tile) Atm(1)%Fv_restart_tile_is_open = .false. 
endif @@ -429,7 +429,7 @@ subroutine fv_io_read_restart(fv_domain,Atm) Atm(1)%Tra_restart_is_open = open_file(Atm(1)%Tra_restart, fname, "read", fv_domain, is_restart=.true.) if (Atm(1)%Tra_restart_is_open) then call fv_io_register_restart(Atm(1)) - call read_restart(Atm(1)%Tra_restart, ignore_checksum=Atm(1)%flagstruct%enforce_rst_cksum) + call read_restart(Atm(1)%Tra_restart, ignore_checksum=Atm(1)%flagstruct%ignore_rst_cksum) call close_file(Atm(1)%Tra_restart) Atm(1)%Tra_restart_is_open = .false. else @@ -442,7 +442,7 @@ subroutine fv_io_read_restart(fv_domain,Atm) if (Atm(1)%Rsf_restart_is_open) then Atm(1)%flagstruct%srf_init = .true. call fv_io_register_restart(Atm(1)) - call read_restart(Atm(1)%Rsf_restart, ignore_checksum=Atm(1)%flagstruct%enforce_rst_cksum) + call read_restart(Atm(1)%Rsf_restart, ignore_checksum=Atm(1)%flagstruct%ignore_rst_cksum) call close_file(Atm(1)%Rsf_restart) Atm(1)%Rsf_restart_is_open = .false. else @@ -456,7 +456,7 @@ subroutine fv_io_read_restart(fv_domain,Atm) Atm(1)%Mg_restart_is_open = open_file(Atm(1)%Mg_restart, fname, "read", fv_domain, is_restart=.true.) if (Atm(1)%Mg_restart_is_open) then call fv_io_register_restart(Atm(1)) - call read_restart(Atm(1)%Mg_restart, ignore_checksum=Atm(1)%flagstruct%enforce_rst_cksum) + call read_restart(Atm(1)%Mg_restart, ignore_checksum=Atm(1)%flagstruct%ignore_rst_cksum) call close_file(Atm(1)%Mg_restart) Atm(1)%Mg_restart_is_open = .false. else @@ -467,7 +467,7 @@ subroutine fv_io_read_restart(fv_domain,Atm) Atm(1)%Lnd_restart_is_open = open_file(Atm(1)%Lnd_restart, fname, "read", fv_domain, is_restart=.true.) if (Atm(1)%Lnd_restart_is_open) then call fv_io_register_restart(Atm(1)) - call read_restart(Atm(1)%Lnd_restart, ignore_checksum=Atm(1)%flagstruct%enforce_rst_cksum) + call read_restart(Atm(1)%Lnd_restart, ignore_checksum=Atm(1)%flagstruct%ignore_rst_cksum) call close_file(Atm(1)%Lnd_restart) Atm(1)%Lnd_restart_is_open = .false. 
else @@ -640,7 +640,7 @@ subroutine remap_restart(Atm) if (Atm(1)%Rsf_restart_is_open) then Atm%flagstruct%srf_init = .true. call fv_io_register_restart(Atm(1)) - call read_restart(Atm(1)%Rsf_restart, ignore_checksum=Atm(1)%flagstruct%enforce_rst_cksum) + call read_restart(Atm(1)%Rsf_restart, ignore_checksum=Atm(1)%flagstruct%ignore_rst_cksum) call close_file(Atm(1)%Rsf_restart) Atm(1)%Rsf_restart_is_open = .false. call mpp_error(NOTE,'==> Warning from remap_restart: Expected file '//trim(fname)//' does not exist') @@ -1279,12 +1279,12 @@ subroutine fv_io_read_BCs(Atm) call fv_io_register_restart_BCs(Atm) if (Atm%neststruct%BCfile_sw_is_open) then - call read_restart_bc(Atm%neststruct%BCfile_sw, ignore_checksum=Atm%flagstruct%enforce_rst_cksum) + call read_restart_bc(Atm%neststruct%BCfile_sw, ignore_checksum=Atm%flagstruct%ignore_rst_cksum) call close_file(Atm%neststruct%BCfile_sw) endif if (Atm%neststruct%BCfile_ne_is_open) then - call read_restart_bc(Atm%neststruct%BCfile_ne, ignore_checksum=Atm%flagstruct%enforce_rst_cksum) + call read_restart_bc(Atm%neststruct%BCfile_ne, ignore_checksum=Atm%flagstruct%ignore_rst_cksum) call close_file(Atm%neststruct%BCfile_ne) endif From 619e5d8e472af1928e84a63541245c97d3e3b00e Mon Sep 17 00:00:00 2001 From: Rusty Benson Date: Tue, 14 Jun 2022 11:26:47 -0400 Subject: [PATCH 3/7] updated UFS/GFS atmosphere.F90 driver as per @BinLiu-NOAA and @junwang-noaa --- driver/fvGFS/atmosphere.F90 | 22 ++++++++++++---------- 1 file changed, 12 insertions(+), 10 deletions(-) diff --git a/driver/fvGFS/atmosphere.F90 b/driver/fvGFS/atmosphere.F90 index f2f68eb6e..2796c823e 100644 --- a/driver/fvGFS/atmosphere.F90 +++ b/driver/fvGFS/atmosphere.F90 @@ -324,10 +324,12 @@ subroutine atmosphere_init (Time_init, Time, Time_step, Grid_box, area) logical :: dycore_only = .false. logical :: debug = .false. logical :: sync = .false. + logical :: ignore_rst_cksum = .false. integer, parameter :: maxhr = 4096 real, dimension(maxhr) :: fdiag = 0. 
real :: fhmax=384.0, fhmaxhf=120.0, fhout=3.0, fhouthf=1.0,avg_max_length=3600. - namelist /atmos_model_nml/ blocksize, chksum_debug, dycore_only, debug, sync, fdiag, fhmax, fhmaxhf, fhout, fhouthf, ccpp_suite, avg_max_length + namelist /atmos_model_nml/ blocksize, chksum_debug, dycore_only, debug, sync, fdiag, fhmax, fhmaxhf, fhout, fhouthf, ccpp_suite, & + & avg_max_length, ignore_rst_cksum ! *DH 20210326 !For regional @@ -449,6 +451,15 @@ subroutine atmosphere_init (Time_init, Time, Time_step, Grid_box, area) !--- allocate pref allocate(pref(npz+1,2), dum1d(npz+1)) + ! DH* 20210326 + ! First, read atmos_model_nml namelist section - this is a workaround to avoid + ! unnecessary additional changes to the input namelists, in anticipation of the + ! implementation of a generic interface for GFDL and CCPP fast physics soon + read(input_nml_file, nml=atmos_model_nml, iostat=io) + ierr = check_nml_error(io, 'atmos_model_nml') + !write(0,'(a)') "It's me, and my physics suite is '" // trim(ccpp_suite) // "'" + ! *DH 20210326 + call fv_restart(Atm(mygrid)%domain, Atm, seconds, days, cold_start, Atm(mygrid)%gridstruct%grid_type, mygrid) fv_time = Time @@ -490,15 +501,6 @@ subroutine atmosphere_init (Time_init, Time, Time_step, Grid_box, area) ! Do CCPP fast physics initialization before call to adiabatic_init (since this calls fv_dynamics) - ! DH* 20210326 - ! First, read atmos_model_nml namelist section - this is a workaround to avoid - ! unnecessary additional changes to the input namelists, in anticipation of the - ! implementation of a generic interface for GFDL and CCPP fast physics soon - read(input_nml_file, nml=atmos_model_nml, iostat=io) - ierr = check_nml_error(io, 'atmos_model_nml') - !write(0,'(a)') "It's me, and my physics suite is '" // trim(ccpp_suite) // "'" - ! *DH 20210326 - ! For fast physics running over the entire domain, block ! 
and thread number are not used; set to safe values cdata%blk_no = 1 From 49f30ee71f5034311e65de3cbb19e2181d59b71e Mon Sep 17 00:00:00 2001 From: Ted Mansell <37668594+MicroTed@users.noreply.github.com> Date: Fri, 17 Jun 2022 09:03:13 -0500 Subject: [PATCH 4/7] Regional decomposition test fix (when nrows_blend > 0) (#194) * Add missing instance for hailwat * Regional bc blend changes to extend into interior halos and overlap on corners. Still not working for u and v. * atmosphere.F90 : add hailwat to check dyn_core.F90 : Fix from Jun Wang to correct sync of u,v fv_regional_bc.F90 : add check for nrows_blend > tile size; fix error when nrows_blend=1 * Explanatory comment added * Removed commented code * Clean old code --- driver/fvGFS/atmosphere.F90 | 5 +- model/dyn_core.F90 | 7 ++- model/fv_regional_bc.F90 | 91 +++++++++++++++++++++++++++---------- model/fv_sg.F90 | 16 ++++++- 4 files changed, 88 insertions(+), 31 deletions(-) diff --git a/driver/fvGFS/atmosphere.F90 b/driver/fvGFS/atmosphere.F90 index 7af45f664..f3130ba56 100644 --- a/driver/fvGFS/atmosphere.F90 +++ b/driver/fvGFS/atmosphere.F90 @@ -265,7 +265,7 @@ module atmosphere_mod logical :: cold_start = .false. ! used in initial condition integer, dimension(:), allocatable :: id_tracerdt_dyn - integer :: sphum, liq_wat, rainwat, ice_wat, snowwat, graupel, cld_amt ! condensate species tracer indices + integer :: sphum, liq_wat, rainwat, ice_wat, snowwat, graupel, hailwat, cld_amt ! 
condensate species tracer indices integer :: mygrid = 1 integer :: p_split = 1 @@ -403,9 +403,10 @@ subroutine atmosphere_init (Time_init, Time, Time_step, Grid_box, area) rainwat = get_tracer_index (MODEL_ATMOS, 'rainwat' ) snowwat = get_tracer_index (MODEL_ATMOS, 'snowwat' ) graupel = get_tracer_index (MODEL_ATMOS, 'graupel' ) + hailwat = get_tracer_index (MODEL_ATMOS, 'hailwat' ) cld_amt = get_tracer_index (MODEL_ATMOS, 'cld_amt') - if (max(sphum,liq_wat,ice_wat,rainwat,snowwat,graupel) > Atm(mygrid)%flagstruct%nwat) then + if (max(sphum,liq_wat,ice_wat,rainwat,snowwat,graupel,hailwat) > Atm(mygrid)%flagstruct%nwat) then call mpp_error (FATAL,' atmosphere_init: condensate species are not first in the list of & &tracers defined in the field_table') endif diff --git a/model/dyn_core.F90 b/model/dyn_core.F90 index 7806df52e..f858d8804 100644 --- a/model/dyn_core.F90 +++ b/model/dyn_core.F90 @@ -1248,7 +1248,7 @@ subroutine dyn_core(npx, npy, npz, ng, sphum, nq, bdt, n_map, n_split, zvir, cp, call timing_on('COMM_TOTAL') #ifndef ROT3 - if ( it/=n_split) & + if ( .not. flagstruct%regional .and. it/=n_split) & call start_group_halo_update(i_pack(8), u, v, domain, gridtype=DGRID_NE) #endif call timing_off('COMM_TOTAL') @@ -1351,7 +1351,10 @@ subroutine dyn_core(npx, npy, npz, ng, sphum, nq, bdt, n_map, n_split, zvir, cp, isd, ied, jsd, jed, & reg_bc_update_time,it ) - call mpp_update_domains(u, v, domain, gridtype=DGRID_NE) +#ifndef ROT3 + if (it/=n_split) & + call start_group_halo_update(i_pack(8), u, v, domain, gridtype=DGRID_NE) +#endif endif diff --git a/model/fv_regional_bc.F90 b/model/fv_regional_bc.F90 index 06a5158b4..f9d6e8c2f 100644 --- a/model/fv_regional_bc.F90 +++ b/model/fv_regional_bc.F90 @@ -410,6 +410,17 @@ subroutine setup_regional_BC(Atm & else nrows_blend=nrows_blend_in_data !<-- # of blending rows in the BC files. endif + + IF ( north_bc .or. 
south_bc ) THEN + IF ( nrows_blend_user > jed - nhalo_model - (jsd + nhalo_model) + 1 ) THEN + call mpp_error(FATAL,'Number of blending rows is greater than the north-south tile size!') + ENDIF + ENDIF + IF ( west_bc .or. east_bc ) THEN + IF ( nrows_blend_user > ied - nhalo_model - (isd + nhalo_model) + 1 ) THEN + call mpp_error(FATAL,'Number of blending rows is greater than the east-west tile size!') + ENDIF + ENDIF ! call check(nf90_close(ncid)) !<-- Close the BC file for now. ! @@ -4352,7 +4363,7 @@ subroutine regional_boundary_update(array & ! real,dimension(:,:,:),pointer :: bc_t0,bc_t1 !<-- Boundary data at the two bracketing times. ! - logical :: blend,call_interp + logical :: blend,call_interp,blendtmp ! !--------------------------------------------------------------------- !********************************************************************* @@ -4396,13 +4407,21 @@ subroutine regional_boundary_update(array & i2=ied+1 endif ! - j1=jsd - j2=js-1 + j1=jsd ! -2 -- outermost boundary ghost zone + j2=js-1 ! 0 -- first boundary ghost zone ! + IF ( east_bc ) THEN i1_blend=is + ELSE + i1_blend=isd !is-nhalo_model + ENDIF + IF ( west_bc ) THEN i2_blend=ie + ELSE + i2_blend=ied ! ie+nhalo_model + ENDIF if(trim(bc_vbl_name)=='uc'.or.trim(bc_vbl_name)=='v'.or.trim(bc_vbl_name)=='divgd')then - i2_blend=ie+1 + i2_blend=i2_blend+1 ! ie+1 endif j1_blend=js j2_blend=js+nrows_blend_user-1 @@ -4437,8 +4456,19 @@ subroutine regional_boundary_update(array & ! i1_blend=is i2_blend=ie - if(trim(bc_vbl_name)=='uc'.or.trim(bc_vbl_name)=='v'.or.trim(bc_vbl_name)=='divgd')then - i2_blend=ie+1 + IF ( east_bc ) THEN + i1_blend=is + ELSE + i1_blend=isd !is-nhalo_model + ENDIF + IF ( west_bc ) THEN + i2_blend=ie + ELSE + i2_blend=ied ! ie+nhalo_model + ENDIF + if(trim(bc_vbl_name)=='uc'.or.trim(bc_vbl_name)=='v'.or.trim(bc_vbl_name)=='divgd')then +! 
i2_blend=ie+1 + i2_blend=i2_blend+1 endif j2_blend=je if(trim(bc_vbl_name)=='u'.or.trim(bc_vbl_name)=='vc'.or.trim(bc_vbl_name)=='divgd')then @@ -4483,16 +4513,21 @@ subroutine regional_boundary_update(array & endif endif ! - i1_blend=is - i2_blend=is+nrows_blend_user-1 +! Note: Original code checked for corner region and avoided overlap, but changed this to blend corners from both boundaries + i1_blend=is + i2_blend=is+nrows_blend_user-1 + + IF ( north_bc ) THEN j1_blend=js + ELSE + j1_blend=jsd !js-nhalo_model + ENDIF + IF ( south_bc ) THEN j2_blend=je - if(north_bc)then - j1_blend=js+nrows_blend_user !<-- North BC already handles nrows_blend_user blending rows - endif - if(south_bc)then - j2_blend=je-nrows_blend_user !<-- South BC already handles nrows_blend_user blending rows - endif + ELSE + j2_blend=jed ! ie+nhalo_model + ENDIF + if(trim(bc_vbl_name)=='u'.or.trim(bc_vbl_name)=='vc'.or.trim(bc_vbl_name)=='divgd')then j2_blend=j2_blend+1 endif @@ -4538,16 +4573,20 @@ subroutine regional_boundary_update(array & endif endif ! +! Note: Original code checked for corner region and avoided overlap, but changed this to blend corners from both boundaries i1_blend=i1-nrows_blend_user i2_blend=i1-1 + + IF ( north_bc ) THEN j1_blend=js + ELSE + j1_blend=jsd !is-nhalo_model + ENDIF + IF ( south_bc ) THEN j2_blend=je - if(north_bc)then - j1_blend=js+nrows_blend_user !<-- North BC already handled nrows_blend_user blending rows. - endif - if(south_bc)then - j2_blend=je-nrows_blend_user !<-- South BC already handled nrows_blend_user blending rows. - endif + ELSE + j2_blend=jed ! ie+nhalo_model + ENDIF if(trim(bc_vbl_name)=='u'.or.trim(bc_vbl_name)=='vc'.or.trim(bc_vbl_name)=='divgd')then j2_blend=j2_blend+1 endif @@ -4563,6 +4602,8 @@ subroutine regional_boundary_update(array & !*** then update the boundary points. !--------------------------------------------------------------------- ! + + if(call_interp)then ! 
call retrieve_bc_variable_data(bc_vbl_name & @@ -4787,7 +4828,7 @@ subroutine bc_time_interpolation(array & ! !--------------------------------------------------------------------- -! +! Set values in the boundary points only do k=1,ubnd_z do j=j1,j2 do i=i1,i2 @@ -4818,7 +4859,7 @@ subroutine bc_time_interpolation(array & !----------- ! if(nside==1.and.north_bc)then - rdenom=1./real(j2_blend-j_bc-1) + rdenom=1./real(Max(1,j2_blend-j_bc-1)) do k=1,ubnd_z do j=j1_blend,j2_blend factor_dist=exp(-(blend_exp1+blend_exp2*(j-j_bc-1)*rdenom)) !<-- Exponential falloff of blending weights. @@ -4837,7 +4878,7 @@ subroutine bc_time_interpolation(array & !----------- ! if(nside==2.and.south_bc)then - rdenom=1./real(j_bc-j1_blend-1) + rdenom=1./real(Max(1,j_bc-j1_blend-1)) do k=1,ubnd_z do j=j1_blend,j2_blend factor_dist=exp(-(blend_exp1+blend_exp2*(j_bc-j-1)*rdenom)) !<-- Exponential falloff of blending weights. @@ -4855,7 +4896,7 @@ subroutine bc_time_interpolation(array & !---------- ! if(nside==3.and.east_bc)then - rdenom=1./real(i2_blend-i_bc-1) + rdenom=1./real(Max(1,i2_blend-i_bc-1)) do k=1,ubnd_z do j=j1_blend,j2_blend do i=i1_blend,i2_blend @@ -4876,7 +4917,7 @@ subroutine bc_time_interpolation(array & !---------- ! 
if(nside==4.and.west_bc)then - rdenom=1./real(i_bc-i1_blend-1) + rdenom=1./real(Max(1, i_bc-i1_blend-1)) do k=1,ubnd_z do j=j1_blend,j2_blend do i=i1_blend,i2_blend diff --git a/model/fv_sg.F90 b/model/fv_sg.F90 index 2446b1144..e60693203 100644 --- a/model/fv_sg.F90 +++ b/model/fv_sg.F90 @@ -404,12 +404,18 @@ subroutine fv_subgrid_z( isd, ied, jsd, jed, is, ie, js, je, km, nq, dt, & qcon(i,k) = q0(i,k,liq_wat)+q0(i,k,ice_wat)+q0(i,k,snowwat)+q0(i,k,rainwat) enddo enddo - else + elseif ( nwat==6 ) then do k=1,kbot do i=is,ie qcon(i,k) = q0(i,k,liq_wat)+q0(i,k,ice_wat)+q0(i,k,snowwat)+q0(i,k,rainwat)+q0(i,k,graupel) enddo enddo + elseif ( nwat==7 ) then + do k=1,kbot + do i=is,ie + qcon(i,k) = q0(i,k,liq_wat)+q0(i,k,ice_wat)+q0(i,k,snowwat)+q0(i,k,rainwat)+q0(i,k,graupel)+q0(i,k,hailwat) + enddo + enddo endif do k=kbot, 2, -1 @@ -985,12 +991,18 @@ subroutine fv_subgrid_z( isd, ied, jsd, jed, is, ie, js, je, km, nq, dt, & do i=is,ie qcon(i,k) = q0(i,k,liq_wat)+q0(i,k,ice_wat)+q0(i,k,snowwat)+q0(i,k,rainwat) enddo - else + elseif ( nwat==6 ) then do k=1,kbot do i=is,ie qcon(i,k) = q0(i,k,liq_wat)+q0(i,k,ice_wat)+q0(i,k,snowwat)+q0(i,k,rainwat)+q0(i,k,graupel) enddo enddo + elseif ( nwat==7 ) then + do k=1,kbot + do i=is,ie + qcon(i,k) = q0(i,k,liq_wat)+q0(i,k,ice_wat)+q0(i,k,snowwat)+q0(i,k,rainwat)+q0(i,k,graupel)+q0(i,k,hailwat) + enddo + enddo endif do k=kbot, 2, -1 From 153cd903f8f95a7bc41fb242fe96fd7cdd4c2b64 Mon Sep 17 00:00:00 2001 From: "Samuel Trahan (NOAA contractor)" <39415369+SamuelTrahanNOAA@users.noreply.github.com> Date: Tue, 19 Jul 2022 09:35:03 -0400 Subject: [PATCH 5/7] In fv_fill.F90, use kind_phys for kind_phys instead of hard-coding 8 byte reals. (#193) --- model/fv_fill.F90 | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/model/fv_fill.F90 b/model/fv_fill.F90 index faab16fdf..5579c7221 100644 --- a/model/fv_fill.F90 +++ b/model/fv_fill.F90 @@ -37,7 +37,7 @@ module fv_fill_mod ! 
use mpp_domains_mod, only: mpp_update_domains, domain2D - use platform_mod, only: kind_phys => r8_kind + use GFS_typedefs, only: kind_phys implicit none public fillz From 9c576597d365db42f4b0cc17992c5f9f1cabca05 Mon Sep 17 00:00:00 2001 From: Mark Potts <33099090+mark-a-potts@users.noreply.github.com> Date: Mon, 29 Aug 2022 09:50:32 -0400 Subject: [PATCH 6/7] Expose remap_scalar and remap_dwinds to fv3-jedi (#199) * changed interface to public * added public * removed source * mods for jedi build * Transfer changes from PR #202 to #199 Made small changes from PR #202 manually. * returned ignore checksum * fixed ignore checksum --- CMakeLists.txt | 5 +++-- tools/external_ic.F90 | 2 +- 2 files changed, 4 insertions(+), 3 deletions(-) diff --git a/CMakeLists.txt b/CMakeLists.txt index 37b0066fa..415a0068f 100644 --- a/CMakeLists.txt +++ b/CMakeLists.txt @@ -54,7 +54,8 @@ endif() if(NOT FMS_FOUND) find_package(FMS REQUIRED COMPONENTS ${kind}) - add_library(fms ALIAS FMS::fms_${kind}) + string(TOLOWER ${kind} kind_lower) + add_library(fms ALIAS FMS::fms_${kind_lower}) endif() list(APPEND moving_srcs @@ -189,7 +190,7 @@ set_property(SOURCE model/fv_mapz.F90 APPEND_STRING PROPERTY COMPILE_FLAGS "${F set_property(SOURCE tools/fv_diagnostics.F90 APPEND_STRING PROPERTY COMPILE_FLAGS "-O0") set_target_properties(fv3 PROPERTIES Fortran_MODULE_DIRECTORY ${CMAKE_CURRENT_BINARY_DIR}/include/fv3) -target_include_directories(fv3 INTERFACE $ +target_include_directories(fv3 PUBLIC $ $) target_include_directories(fv3 PRIVATE ${CMAKE_CURRENT_SOURCE_DIR} diff --git a/tools/external_ic.F90 b/tools/external_ic.F90 index cec8bd1f3..6be85a45f 100644 --- a/tools/external_ic.F90 +++ b/tools/external_ic.F90 @@ -202,7 +202,7 @@ module external_ic_mod ! Include variable "version" to be written to log file. 
#include - public get_external_ic, get_cubed_sphere_terrain + public get_external_ic, get_cubed_sphere_terrain, remap_scalar, remap_dwinds contains From aa42f6e135839492b0a3b80fc3f2c25d766ad437 Mon Sep 17 00:00:00 2001 From: "Samuel Trahan (NOAA contractor)" <39415369+SamuelTrahanNOAA@users.noreply.github.com> Date: Mon, 17 Oct 2022 05:41:30 -0600 Subject: [PATCH 7/7] Fix several bugs in fv_regional_bc.F90 relating to uninitialized or incorrectly initialized memory. (#219) * fixes and workarounds for uninitialized memory in fv_regional_bc * remove workarounds and fix remaining known bugs in ps_reg * a few more surface pressure bug fixes; now the test case runs in debug mode * workarounds and bug fixes from gnu compiler testing * remove -9999999 commented-out code * quiet the NaNs passed to Atmp%ps * simplify comments and explain snan * use i-1 & j-1 for two-point averages, when available * Replace many changes with PR #220 --- model/fv_regional_bc.F90 | 129 ++++++++++++++++++++++++++++++++------- 1 file changed, 106 insertions(+), 23 deletions(-) diff --git a/model/fv_regional_bc.F90 b/model/fv_regional_bc.F90 index f9d6e8c2f..b7c25e3a3 100644 --- a/model/fv_regional_bc.F90 +++ b/model/fv_regional_bc.F90 @@ -254,6 +254,67 @@ module fv_regional_mod logical :: data_source_fv3gfs contains + +!----------------------------------------------------------------------- +! + logical function is_not_finite(val) +! +!----------------------------------------------------------------------- +!*** This routine is equivalent to ".not. ieee_is_finite(val)" +!*** which returns .true. for infinite and Not a Number (NaN), or +!*** .false. otherwise. It's here as a workaround for this gfortran bug: +!*** +!*** https://gcc.gnu.org/bugzilla/show_bug.cgi?id=82207 +!*** +!*** The compiler must use IEEE-standard floating point for this to work +!----------------------------------------------------------------------- +! 
+ use, intrinsic :: iso_c_binding, only: c_int32_t, c_int64_t + implicit none +! +!----------------------------------------------------------------------- +! Portability note: shiftr() is part of Fortran 2008, but it is widely +! supported in older compilers. +!----------------------------------------------------------------------- +! + intrinsic shiftr, transfer, iand ! <--- declare intrinsic to help older compilers +! +!----------------------------------------------------------------------- +! Use value-based argument passing instead of reference-based to avoid +! signaling a NaN on conversion to addressable storage. +!----------------------------------------------------------------------- +! + real, value :: val ! <-- bit pattern to test for infinity or NaN +! +!----------------------------------------------------------------------- +! Bit manipulation constants for testing 32-bit floating-point +! non-finite values. +!----------------------------------------------------------------------- +! +#ifdef OVERLOAD_R4 + integer(c_int32_t), parameter :: check = 255 ! <-- all bits on, size of exponent (8 bits) + integer, parameter :: shift = 23 ! <-- number of mantissa bits except sign +! +!----------------------------------------------------------------------- +! Bit manipulation constants for testing 64-bit floating-point +! non-finite values. +!----------------------------------------------------------------------- +! +#else + integer(c_int64_t), parameter :: check = 2047 ! <-- all bits on, size of exponent (11 bits) + integer, parameter :: shift = 52 ! <-- number of mantissa bits except sign +#endif +! +!----------------------------------------------------------------------- +! For IEEE standard floating point numbers, non-finite values follow +! a mandatory bit pattern. They have the mantissa sign bit on, and all +! exponent bits on, except the exponent sign which can be on or off. +!----------------------------------------------------------------------- +! 
+ is_not_finite = iand(shiftr(transfer(val,check),shift),check)==check +! + end function is_not_finite +! !----------------------------------------------------------------------- ! subroutine setup_regional_BC(Atm & @@ -765,8 +826,8 @@ subroutine setup_regional_BC(Atm & !*** reference pressure profile. Compute it now. !----------------------------------------------------------------------- ! - allocate(pref(npz+1)) - allocate(dum1d(npz+1)) + allocate(pref(npz+1)) ; pref=real_snan + allocate(dum1d(npz+1)) ; dum1d=real_snan ! ps1=101325. pref(npz+1)=ps1 @@ -951,7 +1012,7 @@ subroutine compute_regional_bc_indices(regional_bc_bounds) regional_bc_bounds%ie_north_uvs=ied ! regional_bc_bounds%js_north_uvs=jsd - regional_bc_bounds%je_north_uvs=nrows_blend+1 + regional_bc_bounds%je_north_uvs=nrows_blend ! regional_bc_bounds%is_north_uvw=isd regional_bc_bounds%ie_north_uvw=ied+1 @@ -968,7 +1029,7 @@ subroutine compute_regional_bc_indices(regional_bc_bounds) regional_bc_bounds%is_south_uvs=isd regional_bc_bounds%ie_south_uvs=ied ! - regional_bc_bounds%js_south_uvs=jed-nhalo_model-nrows_blend+1 + regional_bc_bounds%js_south_uvs=jed-nhalo_model-nrows_blend+2 regional_bc_bounds%je_south_uvs=jed+1 ! regional_bc_bounds%is_south_uvw=isd @@ -1028,7 +1089,7 @@ subroutine compute_regional_bc_indices(regional_bc_bounds) regional_bc_bounds%je_west_uvs=jed-nhalo_model+1 endif ! - regional_bc_bounds%is_west_uvw=ied-nhalo_model-nrows_blend+1 + regional_bc_bounds%is_west_uvw=ied-nhalo_model-nrows_blend+2 regional_bc_bounds%ie_west_uvw=ied+1 ! regional_bc_bounds%js_west_uvw=jsd @@ -1307,8 +1368,8 @@ subroutine start_regional_cold_start(Atm, ak, bk, levp & enddo enddo ! - allocate (ak_in(1:levp+1)) !<-- Save the input vertical structure for - allocate (bk_in(1:levp+1)) ! remapping BC updates during the forecast. + allocate (ak_in(1:levp+1)) ; ak_in=real_snan !<-- Save the input vertical structure for + allocate (bk_in(1:levp+1)) ; bk_in=real_snan ! remapping BC updates during the forecast. 
do k=1,levp+1 ak_in(k)=ak(k) bk_in(k)=bk(k) @@ -1402,9 +1463,9 @@ subroutine start_regional_restart(Atm & ,isd, ied, jsd, jed & ,Atm%npx, Atm%npy ) ! - allocate (wk2(levp+1,2)) - allocate (ak_in(levp+1)) !<-- Save the input vertical structure for - allocate (bk_in(levp+1)) ! remapping BC updates during the forecast. + allocate (wk2(levp+1,2)) ; wk2=real_snan + allocate (ak_in(levp+1)) ; ak_in=real_snan !<-- Save the input vertical structure for + allocate (bk_in(levp+1)) ; bk_in=real_snan ! remapping BC updates during the forecast. if (Atm%flagstruct%hrrrv3_ic) then if (open_file(Grid_input, 'INPUT/hrrr_ctrl.nc', "read", pelist=pes)) then call read_data(Grid_input,'vcoord',wk2) @@ -1908,7 +1969,8 @@ subroutine regional_bc_data(Atm,bc_hour & !*** the integration levels. !----------------------------------------------------------------------- ! - allocate(ps_reg(is_input:ie_input,js_input:je_input)) ; ps_reg=-9999999 ! for now don't set to snan until remap dwinds is changed + allocate(ps_reg(is_input:ie_input,js_input:je_input)) !<-- Sfc pressure in domain's boundary region derived from BC files + ps_reg=real_snan !<-- detect access of uninitialized pressures ! !----------------------------------------------------------------------- !*** We have the boundary variables from the BC file on the levels @@ -3545,6 +3607,12 @@ subroutine remap_scalar_nggps_regional_bc(Atm & je=js_bc+nhalo_data+nrows_blend-1 endif ! +! Ensure uninitialized memory isn't used + pn0 = real_snan + pn1 = real_snan + gz_fv = real_snan + gz = real_snan + pn = real_snan allocate(pe0(is:ie,km+1)) ; pe0=real_snan allocate(qn1(is:ie,npz)) ; qn1=real_snan allocate(dp2(is:ie,npz)) ; dp2=real_snan @@ -3575,13 +3643,14 @@ subroutine remap_scalar_nggps_regional_bc(Atm & pn(k) = 2.*pn(km+1) - pn(l) enddo + pst = real_snan do k=km+k2-1, 2, -1 if( phis_reg(i,j).le.gz(k) .and. 
phis_reg(i,j).ge.gz(k+1) ) then pst = pn(k) + (pn(k+1)-pn(k))*(gz(k)-phis_reg(i,j))/(gz(k)-gz(k+1)) - go to 123 + exit endif enddo - 123 ps(i,j) = exp(pst) + ps(i,j) = exp(pst) enddo ! i-loop @@ -3594,10 +3663,10 @@ subroutine remap_scalar_nggps_regional_bc(Atm & !*** the Atm object. !--------------------------------------------------------------------------------- ! - is=lbound(Atm%ps,1) - ie=ubound(Atm%ps,1) - js=lbound(Atm%ps,2) - je=ubound(Atm%ps,2) + is=min(ubound(Atm%ps,1),max(lbound(Atm%ps,1),is)) + ie=min(ubound(Atm%ps,1),max(lbound(Atm%ps,1),ie)) + js=min(ubound(Atm%ps,2),max(lbound(Atm%ps,2),js)) + je=min(ubound(Atm%ps,2),max(lbound(Atm%ps,2),je)) ! do j=js,je do i=is,ie @@ -4864,6 +4933,9 @@ subroutine bc_time_interpolation(array & do j=j1_blend,j2_blend factor_dist=exp(-(blend_exp1+blend_exp2*(j-j_bc-1)*rdenom)) !<-- Exponential falloff of blending weights. do i=i1_blend,i2_blend + if(is_not_finite(array(i,j,k))) then + cycle ! Outside boundary + endif blend_value=bc_t0(i,j,k) & !<-- Blend data interpolated +(bc_t1(i,j,k)-bc_t0(i,j,k))*fraction_interval ! between t0 and t1. ! @@ -4883,6 +4955,9 @@ subroutine bc_time_interpolation(array & do j=j1_blend,j2_blend factor_dist=exp(-(blend_exp1+blend_exp2*(j_bc-j-1)*rdenom)) !<-- Exponential falloff of blending weights. do i=i1_blend,i2_blend + if(is_not_finite(array(i,j,k))) then + cycle ! Outside boundary + endif blend_value=bc_t0(i,j,k) & !<-- Blend data interpolated +(bc_t1(i,j,k)-bc_t0(i,j,k))*fraction_interval ! between t0 and t1. array(i,j,k)=(1.-factor_dist)*array(i,j,k)+factor_dist*blend_value @@ -4900,6 +4975,9 @@ subroutine bc_time_interpolation(array & do k=1,ubnd_z do j=j1_blend,j2_blend do i=i1_blend,i2_blend + if(is_not_finite(array(i,j,k))) then + cycle ! Outside boundary + endif ! blend_value=bc_t0(i,j,k) & !<-- Blend data interpolated +(bc_t1(i,j,k)-bc_t0(i,j,k))*fraction_interval ! between t0 and t1. 
@@ -4921,6 +4999,9 @@ subroutine bc_time_interpolation(array & do k=1,ubnd_z do j=j1_blend,j2_blend do i=i1_blend,i2_blend + if(is_not_finite(array(i,j,k))) then + cycle ! Outside boundary + endif ! blend_value=bc_t0(i,j,k) & !<-- Blend data interpolated +(bc_t1(i,j,k)-bc_t0(i,j,k))*fraction_interval ! between t0 and t1. @@ -5727,7 +5808,7 @@ subroutine dump_field_3d (domain, name, field, isd, ied, jsd, jed, nlev, stag) nyg = npy + 2*halo + jext nz = size(field,dim=3) - allocate( glob_field(isg-halo:ieg+halo+iext, jsg-halo:jeg+halo+jext, 1:nz) ) + allocate( glob_field(isg-halo:ieg+halo+iext, jsg-halo:jeg+halo+jext, 1:nz) ) ; glob_field=real_snan isection_s = is isection_e = ie @@ -5846,7 +5927,7 @@ subroutine dump_field_2d (domain, name, field, isd, ied, jsd, jed, stag) nxg = npx + 2*halo + iext nyg = npy + 2*halo + jext - allocate( glob_field(isg-halo:ieg+halo+iext, jsg-halo:jeg+halo+jext) ) + allocate( glob_field(isg-halo:ieg+halo+iext, jsg-halo:jeg+halo+jext) ) ; glob_field=real_snan isection_s = is isection_e = ie @@ -6154,6 +6235,7 @@ subroutine write_full_fields(Atm) nz=size(fields_core(nv)%ptr,3) ! allocate( global_field(istart_g:iend_g, jstart_g:jend_g, 1:nz) ) + global_field=real_snan ! !----------------------------------------------------------------------- !*** What is the local extent of the variable on the task subdomain? @@ -6258,6 +6340,7 @@ subroutine write_full_fields(Atm) nz=size(fields_tracers(1)%ptr,3) ! allocate( global_field(istart_g:iend_g, jstart_g:jend_g, 1:nz) ) + global_field=real_snan ! !----------------------------------------------------------------------- !*** What is the local extent of the variable on the task subdomain? @@ -6614,10 +6697,10 @@ subroutine exch_uv(domain, bd, npz, u, v) ! buf1 and buf4 must be of the same size (sim. for buf2 and buf3). ! Changes to the code below should be tested with debug flags ! enabled (out-of-bounds reads/writes). 
- allocate(buf1(1:24*npz)) - allocate(buf2(1:36*npz)) - allocate(buf3(1:36*npz)) - allocate(buf4(1:24*npz)) + allocate(buf1(1:24*npz)) ; buf1=real_snan + allocate(buf2(1:36*npz)) ; buf2=real_snan + allocate(buf3(1:36*npz)) ; buf3=real_snan + allocate(buf4(1:24*npz)) ; buf4=real_snan ! FIXME: MPI_COMM_WORLD