diff --git a/driver/fvGFS/atmosphere.F90 b/driver/fvGFS/atmosphere.F90
index 6fa14008b..8744b3df9 100644
--- a/driver/fvGFS/atmosphere.F90
+++ b/driver/fvGFS/atmosphere.F90
@@ -229,7 +229,8 @@ module atmosphere_mod
                       atmosphere_get_bottom_layer, &
                       atmosphere_nggps_diag, &
                       get_bottom_mass, get_bottom_wind, &
-                      get_stock_pe, set_atmosphere_pelist
+                      get_stock_pe, set_atmosphere_pelist, &
+                      get_nth_domain_info
 
 !--- physics/radiation data exchange routines
  public :: atmos_phys_driver_statein
@@ -918,15 +919,30 @@ subroutine set_atmosphere_pelist ()
 
  end subroutine set_atmosphere_pelist
 
+ subroutine get_nth_domain_info(n, layout, nx, ny, pelist)
+   integer, intent(in) :: n
+   integer, intent(out) :: layout(2)
+   integer, intent(out) :: nx, ny
+   integer, pointer, intent(out) :: pelist(:)
+
+   layout(1:2) = Atm(n)%layout(1:2)
+   nx = Atm(n)%npx -1
+   ny = Atm(n)%npy -1
+   pelist => Atm(n)%pelist
+
+ end subroutine get_nth_domain_info
+
 !>@brief The subroutine 'atmosphere_domain' is an API to return
 !! the "domain2d" variable associated with the coupling grid and the
 !! decomposition for the current cubed-sphere tile.
 !>@detail Coupling is done using the mass/temperature grid with no halos.
- subroutine atmosphere_domain ( fv_domain, layout, regional, nested, pelist )
+ subroutine atmosphere_domain ( fv_domain, layout, regional, nested, ngrids_atmos, mygrid_atmos, pelist )
   type(domain2d), intent(out) :: fv_domain
   integer, intent(out) :: layout(2)
   logical, intent(out) :: regional
   logical, intent(out) :: nested
+  integer, intent(out) :: ngrids_atmos
+  integer, intent(out) :: mygrid_atmos
   integer, pointer, intent(out) :: pelist(:)
 ! returns the domain2d variable associated with the coupling grid
 ! note: coupling is done using the mass/temperature grid with no halos
@@ -935,6 +951,8 @@ subroutine atmosphere_domain ( fv_domain, layout, regional, nested, pelist )
   layout(1:2) = Atm(mygrid)%layout(1:2)
   regional = Atm(mygrid)%flagstruct%regional
   nested = ngrids > 1
+  ngrids_atmos = ngrids
+  mygrid_atmos = mygrid
 
   call set_atmosphere_pelist()
   pelist => Atm(mygrid)%pelist
diff --git a/driver/fvGFS/fv_nggps_diag.F90 b/driver/fvGFS/fv_nggps_diag.F90
index da7a176bd..04a23a6b3 100644
--- a/driver/fvGFS/fv_nggps_diag.F90
+++ b/driver/fvGFS/fv_nggps_diag.F90
@@ -1007,6 +1007,7 @@ subroutine fv_dyn_bundle_setup(axes, dyn_bundle, fcst_grid, quilting, rc)
        line=__LINE__, &
        file=__FILE__)) &
        return  ! bail out
+      deallocate(axis_name_vert)
     endif
 
     do id = 1,num_axes
@@ -1349,6 +1350,8 @@ subroutine fv_dyn_bundle_setup(axes, dyn_bundle, fcst_grid, quilting, rc)
 !            name="output_file", value=fld_outfilename, rc=rc)
 !        print *,'in dyn bundle setup, i=',i,' fieldname=',trim(fieldnamelist(i)),' out filename=',trim(fld_outfilename)
 !
     enddo
+    deallocate(axis_name)
+    deallocate(all_axes)
 
 end subroutine fv_dyn_bundle_setup
diff --git a/model/dyn_core.F90 b/model/dyn_core.F90
index 2e6173f72..7806df52e 100644
--- a/model/dyn_core.F90
+++ b/model/dyn_core.F90
@@ -561,13 +561,13 @@ subroutine dyn_core(npx, npy, npz, ng, sphum, nq, bdt, n_map, n_split, zvir, cp,
                                        isd, ied, jsd, jed, npz, &
                                        is, ie, js, je, &
                                        isd, ied, jsd, jed, &
-                                       reg_bc_update_time )
+                                       reg_bc_update_time,it )
 #ifndef SW_DYNAMICS
         call regional_boundary_update(ptc, 'pt', &
                                        isd, ied, jsd, jed, npz, &
                                        is, ie, js, je, &
                                        isd, ied, jsd, jed, &
-                                       reg_bc_update_time )
+                                       reg_bc_update_time,it )
 #endif
       endif
      if ( hydrostatic ) then
@@ -727,12 +727,12 @@ subroutine dyn_core(npx, npy, npz, ng, sphum, nq, bdt, n_map, n_split, zvir, cp,
                                      isd, ied, jsd, jed+1, npz, &
                                      is, ie, js, je, &
                                      isd, ied, jsd, jed, &
-                                     reg_bc_update_time )
+                                     reg_bc_update_time,it )
        call regional_boundary_update(uc, 'uc', &
                                      isd, ied+1, jsd, jed, npz, &
                                      is, ie, js, je, &
                                      isd, ied, jsd, jed, &
-                                     reg_bc_update_time )
+                                     reg_bc_update_time,it )
        call mpp_update_domains(uc, vc, domain, gridtype=CGRID_NE)
 !!! Currently divgd is always 0.0 in the regional domain boundary area.
        reg_bc_update_time=current_time_in_seconds+bdt*(n_map-1)+(it-1)*dt
@@ -740,7 +740,7 @@ subroutine dyn_core(npx, npy, npz, ng, sphum, nq, bdt, n_map, n_split, zvir, cp,
                                      isd, ied+1, jsd, jed+1, npz, &
                                      is, ie, js, je, &
                                      isd, ied, jsd, jed, &
-                                     reg_bc_update_time )
+                                     reg_bc_update_time,it )
       endif
 
      if ( flagstruct%inline_q ) then
@@ -758,7 +758,7 @@ subroutine dyn_core(npx, npy, npz, ng, sphum, nq, bdt, n_map, n_split, zvir, cp,
                                        isd, ied, jsd, jed, npz, &
                                        is, ie, js, je, &
                                        isd, ied, jsd, jed, &
-                                       reg_bc_update_time )
+                                       reg_bc_update_time,it )
         enddo
       endif
 
@@ -996,20 +996,20 @@ subroutine dyn_core(npx, npy, npz, ng, sphum, nq, bdt, n_map, n_split, zvir, cp,
                                       isd, ied, jsd, jed, npz, &
                                       is, ie, js, je, &
                                       isd, ied, jsd, jed, &
-                                      reg_bc_update_time )
+                                      reg_bc_update_time,it )
 
 #ifndef SW_DYNAMICS
         call regional_boundary_update(pt, 'pt', &
                                       isd, ied, jsd, jed, npz, &
                                       is, ie, js, je, &
                                       isd, ied, jsd, jed, &
-                                      reg_bc_update_time )
+                                      reg_bc_update_time,it )
 
 #ifdef USE_COND
         call regional_boundary_update(q_con, 'q_con', &
                                       isd, ied, jsd, jed, npz, &
                                       is, ie, js, je, &
                                       isd, ied, jsd, jed, &
-                                      reg_bc_update_time )
+                                      reg_bc_update_time,it )
 #endif
 #endif
@@ -1329,14 +1329,14 @@ subroutine dyn_core(npx, npy, npz, ng, sphum, nq, bdt, n_map, n_split, zvir, cp,
 
      if (flagstruct%regional) then
+        reg_bc_update_time=current_time_in_seconds+bdt*(n_map-1)+it*dt
 #ifndef SW_DYNAMICS
        if (.not. hydrostatic) then
-          reg_bc_update_time=current_time_in_seconds+bdt*(n_map-1)+it*dt
          call regional_boundary_update(w, 'w', &
                                        isd, ied, jsd, jed, ubound(w,3), &
                                        is, ie, js, je, &
                                        isd, ied, jsd, jed, &
-                                       reg_bc_update_time )
+                                       reg_bc_update_time,it )
        endif
 #endif SW_DYNAMICS
@@ -1344,12 +1344,12 @@ subroutine dyn_core(npx, npy, npz, ng, sphum, nq, bdt, n_map, n_split, zvir, cp,
                                      isd, ied, jsd, jed+1, npz, &
                                      is, ie, js, je, &
                                      isd, ied, jsd, jed, &
-                                     reg_bc_update_time )
+                                     reg_bc_update_time,it )
        call regional_boundary_update(v, 'v', &
                                      isd, ied+1, jsd, jed, npz, &
                                      is, ie, js, je, &
                                      isd, ied, jsd, jed, &
-                                     reg_bc_update_time )
+                                     reg_bc_update_time,it )
 
        call mpp_update_domains(u, v, domain, gridtype=DGRID_NE)
diff --git a/model/fv_dynamics.F90 b/model/fv_dynamics.F90
index 2dc440c49..aca34c80e 100644
--- a/model/fv_dynamics.F90
+++ b/model/fv_dynamics.F90
@@ -803,7 +803,7 @@ subroutine fv_dynamics(npx, npy, npz, nq_tot, ng, bdt, consv_te, fill,
                                      isd, ied, jsd, jed, npz, &
                                      is, ie, js, je, &
                                      isd, ied, jsd, jed, &
-                                     reg_bc_update_time )
+                                     reg_bc_update_time,1 )
      endif
 #endif
 
diff --git a/model/fv_regional_bc.F90 b/model/fv_regional_bc.F90
index f2e589e24..1d83443b8 100644
--- a/model/fv_regional_bc.F90
+++ b/model/fv_regional_bc.F90
@@ -383,18 +383,12 @@ subroutine setup_regional_BC(Atm &
         file_name='INPUT/gfs_bndy.tile7.'//int_to_char//'_gsi.nc'   !<-- The DA-updated BC file.
       endif
 !
-      if (is_master()) then
-        write(*,20011)trim(file_name)
-20011   format(' regional_bc_data file_name=',a)
-      endif
 !-----------------------------------------------------------------------
 !***  Open the regional BC file.
 !-----------------------------------------------------------------------
 !
      call check(nf90_open(file_name,nf90_nowrite,ncid))   !<-- Open the BC file; get the file ID.
-     if (is_master()) then
-       write(0,*)' opened BC file ',trim(file_name)
-     endif
+     call mpp_error(NOTE, 'Opened BC file: '//trim(file_name))
 !
 !-----------------------------------------------------------------------
 !***  Check if the desired number of blending rows are present in
@@ -1075,7 +1069,7 @@ subroutine read_regional_lon_lat
 !
      call check(nf90_open(filename,nf90_nowrite,ncid_grid))   !<-- Open the grid data netcdf file; get the file ID.
 !
-     call mpp_error(NOTE,' opened grid file '//trim(filename))
+     call mpp_error(NOTE, 'Opened grid file: '//trim(filename))
 !
 !-----------------------------------------------------------------------
 !***  The longitude and latitude are on the super grid.  We need only
@@ -1170,12 +1164,9 @@ subroutine read_regional_filtered_topo
 !
      filename='INPUT/'//trim(oro_data)
-     if (is_master()) then
-       write(*,23421)trim(filename)
-23421  format(' topo filename=',a)
-     endif
 !
      call check(nf90_open(filename,nf90_nowrite,ncid_oro))   !<-- Open the netcdf file; get the file ID.
+     call mpp_error(NOTE, 'Opened topo file: '//trim(filename))
 !
 !-----------------------------------------------------------------------
 !***  Read in the data including the extra outer row.
@@ -1661,19 +1652,13 @@ subroutine regional_bc_data(Atm,bc_hour &
         file_name='INPUT/gfs_bndy.tile7.'//int_to_char//'_gsi.nc'   !<-- The DA-updated BC file.
       endif
 !
-      if (is_master()) then
-        write(*,22211)trim(file_name)
-22211   format(' regional_bc_data file_name=',a)
-      endif
 !-----------------------------------------------------------------------
 !***  Open the regional BC file.
 !***  Find the # of layers (klev_in) in the BC input.
 !-----------------------------------------------------------------------
 !
      call check(nf90_open(file_name,nf90_nowrite,ncid))   !<-- Open the BC file; get the file ID.
-     if (is_master()) then
-       write(0,*)' opened BC file ',trim(file_name)
-     endif
+     call mpp_error(NOTE, 'Opened BC file: '//trim(file_name))
 !
      call check(nf90_inq_dimid(ncid,'lev',dimid))   !<-- Get the vertical dimension's NetCDF ID.
      call check(nf90_inquire_dimension(ncid,dimid,len=klev_in))   !<-- Get the vertical dimension's value (klev_in).
@@ -3305,7 +3290,9 @@ subroutine read_regional_bc_file(is_input,ie_input &
        call check(status)
      endif
      if (status /= nf90_noerr) then
-       if (east_bc.and.is_master()) write(0,*)' WARNING: Tracer ',trim(var_name),' not in input file'
+       if (east_bc) then
+         call mpp_error(NOTE, 'Tracer '//trim(var_name)//' not in input file')
+       endif
        array_4d(:,:,:,tlev)=0.   !<-- Tracer not in input so set to zero in boundary.
 !       blend_this_tracer(tlev)=.false.   !<-- Tracer not in input so do not apply blending.
@@ -3877,7 +3864,6 @@ subroutine remap_scalar_nggps_regional_bc(Atm &
      enddo
 !    call pmaxmn('PS_diff (mb)', wk, is, ie, js, je, 1, 0.01, Atm%gridstruct%area_64, Atm%domain)
      deallocate (pe0,qn1,dp2,pe1,qp)
-     if (is_master()) write(*,*) 'done remap_scalar_nggps_regional_bc'
 
 !---------------------------------------------------------------------
 end subroutine remap_scalar_nggps_regional_bc
@@ -3997,8 +3983,6 @@ subroutine remap_dwinds_regional_bc(Atm &
      deallocate(qn1_d)
      deallocate(qn1_c)
 
-     if (is_master()) write(*,*) 'done remap_dwinds'
-
 end subroutine remap_dwinds_regional_bc
 
 !---------------------------------------------------------------------
@@ -4314,7 +4298,7 @@ subroutine regional_boundary_update(array &
                                    ,is,ie,js,je &
                                    ,isd,ied,jsd,jed &
                                    ,fcst_time &
-                                   ,index4 )
+                                   ,it,index4 )
 !
 !---------------------------------------------------------------------
 !***  Select the given variable's boundary data at the two
@@ -4332,7 +4316,8 @@ subroutine regional_boundary_update(array &
      integer,intent(in) :: lbnd_x,ubnd_x,lbnd_y,ubnd_y,ubnd_z   !<-- Dimensions of full prognostic array to be updated.
 !
      integer,intent(in) :: is,ie,js,je  &                       !<-- Compute limits
-                          ,isd,ied,jsd,jed                      !<-- Memory limits
+                          ,isd,ied,jsd,jed &                    !<-- Memory limits
+                          ,it                                   !<-- Acoustic step
 !
      integer,intent(in),optional :: index4                      !<-- Index for the 4-D tracer array.
 !
@@ -4588,7 +4573,7 @@ subroutine regional_boundary_update(array &
                                ,fcst_time &
                                ,bc_update_interval &
                                ,i1_blend,i2_blend,j1_blend,j2_blend &
-                               ,i_bc,j_bc,nside,bc_vbl_name,blend )
+                               ,i_bc,j_bc,nside,bc_vbl_name,blend,it )
      endif
 !
 !---------------------------------------------------------------------
@@ -4718,7 +4703,7 @@ subroutine bc_time_interpolation(array &
                                ,fcst_time &
                                ,bc_update_interval &
                                ,i1_blend,i2_blend,j1_blend,j2_blend &
-                               ,i_bc,j_bc,nside,bc_vbl_name,blend )
+                               ,i_bc,j_bc,nside,bc_vbl_name,blend,it )
 
 !---------------------------------------------------------------------
 !***  Update the boundary region of the input array at the given
@@ -4743,7 +4728,7 @@ subroutine bc_time_interpolation(array &
 !
      integer,intent(in) :: is,ie,js,je                   !<-- Min/Max index limits on task's computational subdomain
 !
-     integer,intent(in) :: bc_update_interval            !<-- Time (hours) between BC data states
+     integer,intent(in) :: bc_update_interval,it         !<-- Time (hours) between BC data states, acoustic step
 !
      real,intent(in) :: fcst_time                        !<-- Current forecast time (sec)
 !
@@ -4780,6 +4765,19 @@ subroutine bc_time_interpolation(array &
 !
      fraction_interval=mod(fcst_time,(bc_update_interval*3600.))      &
                        /(bc_update_interval*3600.)
+
+!---------------------------------------------------------------------
+!***  Special check for final acoustic step prior to new boundary information
+!***  being ingested.
+!---------------------------------------------------------------------
+
+     if (fraction_interval .eq. 0.0 .and. it .gt. 1) then
+       fraction_interval=1.0
+       if (is_master()) then
+         write(0,*) 'reset of fraction_interval ', trim(bc_vbl_name),it, fcst_time
+       endif
+     endif
+
 !
 !---------------------------------------------------------------------
 !
@@ -6741,7 +6739,7 @@ subroutine get_data_source(data_source_fv3gfs,regional)
      logical, intent(out):: data_source_fv3gfs
      character (len=80)  :: source
-     logical :: lstatus
+     logical :: lstatus = .false.
      type(FmsNetcdfFile_t) :: Gfs_data
      integer, allocatable, dimension(:) :: pes   !< Array of the pes in the current pelist
 !
@@ -6764,7 +6762,7 @@ subroutine get_data_source(data_source_fv3gfs,regional)
        if (mpp_pe() == 0) write(0,*) 'INPUT source not found ',lstatus,' set source=No Source Attribute'
        source='No Source Attribute'
      endif
-     if (mpp_pe()==0) write(*,*) 'INPUT gfs_data source string=',source
+     call mpp_error(NOTE, 'INPUT gfs_data source string: '//trim(source))
 
 ! Logical flag for fv3gfs nemsio/netcdf/grib2 --------
      if ( trim(source)=='FV3GFS GAUSSIAN NEMSIO FILE' .or. &
@@ -6774,7 +6772,6 @@ subroutine get_data_source(data_source_fv3gfs,regional)
      else
        data_source_fv3gfs = .FALSE.
      endif
-     if (mpp_pe()==0) write(*,*) 'data_source_fv3gfs=',data_source_fv3gfs
 
 end subroutine get_data_source
 
diff --git a/model/fv_tracer2d.F90 b/model/fv_tracer2d.F90
index 9039839bb..a47c24179 100644
--- a/model/fv_tracer2d.F90
+++ b/model/fv_tracer2d.F90
@@ -764,7 +764,7 @@ subroutine tracer_2d_nested(q, dp1, mfx, mfy, cx, cy, gridstruct, bd, domain, np
                                          is, ie, js, je, &
                                          isd, ied, jsd, jed, &
                                          reg_bc_update_time, &
-                                         iq )
+                                         it, iq )
        enddo
      endif
 
diff --git a/tools/fv_iau_mod.F90 b/tools/fv_iau_mod.F90
index 12b87e70f..703640a4f 100644
--- a/tools/fv_iau_mod.F90
+++ b/tools/fv_iau_mod.F90
@@ -253,6 +253,7 @@ subroutine IAU_initialize (IPD_Control, IAU_Data,Init_parm)
     allocate (iau_state%inc1%tracer_inc(is:ie, js:je, km,ntracers))
     iau_state%hr1=IPD_Control%iaufhrs(1)
     iau_state%wt = 1.0 ! IAU increment filter weights (default 1.0)
+    iau_state%wt_normfact = 1.0
     if (IPD_Control%iau_filter_increments) then
        ! compute increment filter weights, sum to obtain normalization factor
        dtp=IPD_control%dtp
@@ -298,26 +299,31 @@ subroutine getiauforcing(IPD_Control,IAU_Data)
 type (IPD_control_type), intent(in) :: IPD_Control
 type(IAU_external_data_type), intent(inout) :: IAU_Data
 real(kind=kind_phys) t1,t2,sx,wx,wt,dtp
-integer n,i,j,k,sphum,kstep,nstep
+integer n,i,j,k,sphum,kstep,nstep,itnext
 
 IAU_Data%in_interval=.false.
 if (nfiles.LE.0) then
    return
 endif
 
-t1=iau_state%hr1 - IPD_Control%iau_delthrs*0.5
-t2=iau_state%hr1 + IPD_Control%iau_delthrs*0.5
+if (nfiles .eq. 1) then
+   t1 = IPD_Control%iaufhrs(1)-0.5*IPD_Control%iau_delthrs
+   t2 = IPD_Control%iaufhrs(1)+0.5*IPD_Control%iau_delthrs
+else
+   t1 = IPD_Control%iaufhrs(1)
+   t2 = IPD_Control%iaufhrs(nfiles)
+endif
 if (IPD_Control%iau_filter_increments) then
    ! compute increment filter weight
-   ! t1 beginning of window, t2 end of window
+   ! t1 is beginning of window, t2 end of window
    ! IPD_Control%fhour current time
    ! in window kstep=-nstep,nstep (2*nstep+1 total)
    ! time step IPD_control%dtp
   dtp=IPD_control%dtp
   nstep = 0.5*IPD_Control%iau_delthrs*3600/dtp
   ! compute normalized filter weight
-  kstep = (IPD_Control%fhour-(t1+IPD_Control%iau_delthrs*0.5))*3600./dtp
-  if (kstep .ge. -nstep .and. kstep .le. nstep) then
+  kstep = ((IPD_Control%fhour-t1) - 0.5*IPD_Control%iau_delthrs)*3600./dtp
+  if (IPD_Control%fhour >= t1 .and. IPD_Control%fhour < t2) then
     sx     = acos(-1.)*kstep/nstep
     wx     = acos(-1.)*kstep/(nstep+1)
     if (kstep .ne. 0) then
@@ -326,7 +332,7 @@ subroutine getiauforcing(IPD_Control,IAU_Data)
       wt = 1.
     endif
     iau_state%wt = iau_state%wt_normfact*wt
-    if (is_master()) print *,'filter wt',kstep,IPD_Control%fhour,iau_state%wt
+    !if (is_master()) print *,'kstep,t1,t,t2,filter wt=',kstep,t1,IPD_Control%fhour,t2,iau_state%wt/iau_state%wt_normfact
   else
     iau_state%wt = 0.
   endif
@@ -340,31 +346,32 @@ subroutine getiauforcing(IPD_Control,IAU_Data)
      IAU_Data%in_interval=.false.
   else
      if (IPD_Control%iau_filter_increments) call setiauforcing(IPD_Control,IAU_Data,iau_state%wt)
-     if (is_master()) print *,'apply iau forcing',t1,IPD_Control%fhour,t2
+     if (is_master()) print *,'apply iau forcing t1,t,t2,filter wt=',t1,IPD_Control%fhour,t2,iau_state%wt/iau_state%wt_normfact
     IAU_Data%in_interval=.true.
   endif
   return
 endif
 
 if (nfiles > 1) then
-  t2=2
-  if (IPD_Control%fhour < IPD_Control%iaufhrs(1) .or. IPD_Control%fhour >= IPD_Control%iaufhrs(nfiles)) then
+  itnext=2
+  if (IPD_Control%fhour < t1 .or. IPD_Control%fhour >= t2) then
 !    if (is_master()) print *,'no iau forcing',IPD_Control%iaufhrs(1),IPD_Control%fhour,IPD_Control%iaufhrs(nfiles)
     IAU_Data%in_interval=.false.
   else
+    if (is_master()) print *,'apply iau forcing t1,t,t2,filter wt=',t1,IPD_Control%fhour,t2,iau_state%wt/iau_state%wt_normfact
     IAU_Data%in_interval=.true.
     do k=nfiles,1,-1
       if (IPD_Control%iaufhrs(k) > IPD_Control%fhour) then
-        t2=k
+        itnext=k
      endif
    enddo
-!  if (is_master()) print *,'t2=',t2
+!  if (is_master()) print *,'itnext=',itnext
   if (IPD_Control%fhour >= iau_state%hr2) then ! need to read in next increment file
     iau_state%hr1=iau_state%hr2
-    iau_state%hr2=IPD_Control%iaufhrs(int(t2))
+    iau_state%hr2=IPD_Control%iaufhrs(itnext)
    iau_state%inc1=iau_state%inc2
-   if (is_master()) print *,'reading next increment file',trim(IPD_Control%iau_inc_files(int(t2)))
-   call read_iau_forcing(IPD_Control,iau_state%inc2,'INPUT/'//trim(IPD_Control%iau_inc_files(int(t2))))
+   if (is_master()) print *,'reading next increment file',trim(IPD_Control%iau_inc_files(itnext))
+   call read_iau_forcing(IPD_Control,iau_state%inc2,'INPUT/'//trim(IPD_Control%iau_inc_files(itnext)))
   endif
   call updateiauforcing(IPD_Control,IAU_Data,iau_state%wt)
 endif