diff --git a/CHANGELOG.md b/CHANGELOG.md index 5b3f2935..c87f3f8e 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -9,6 +9,8 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0 ### Added +- Added a series of remap*.py scripts to GEOS_Util/post/remap_restart + ### Changed - Changed restart file geosachem to achem in regrid.pl diff --git a/GEOS_Util/post/CMakeLists.txt b/GEOS_Util/post/CMakeLists.txt index 3d5d901e..ce71bc27 100644 --- a/GEOS_Util/post/CMakeLists.txt +++ b/GEOS_Util/post/CMakeLists.txt @@ -50,6 +50,11 @@ install( PROGRAMS ${perlscripts} ec2grd.csh DESTINATION bin) +file(GLOB pythonscripts CONFIGURE_DEPENDS ./remap_restart/remap*) +install( + PROGRAMS ${pythonscripts} + DESTINATION bin) + set_property(SOURCE rsg3_vinterp.F APPEND_STRING PROPERTY COMPILE_FLAGS "${FREAL8} ${BYTERECLEN} ${EXTENDED_SOURCE}") set_property(SOURCE rs_vinterp.F90 APPEND_STRING PROPERTY COMPILE_FLAGS "${FREAL8} ${BYTERECLEN}") set_property(SOURCE rs_vinterp_scm.F90 APPEND_STRING PROPERTY COMPILE_FLAGS "${FREAL8} ${BYTERECLEN}") diff --git a/GEOS_Util/post/remap_restart/remap_analysis.py b/GEOS_Util/post/remap_restart/remap_analysis.py new file mode 100755 index 00000000..0990f497 --- /dev/null +++ b/GEOS_Util/post/remap_restart/remap_analysis.py @@ -0,0 +1,137 @@ +#!/usr/bin/env python3 +# +# source install/bin/g5_modules +# +# Newer GEOS code should load a module with GEOSpyD Python3; if not, run: +# module load python/GEOSpyD/Min4.10.3_py3.9 +# + +import os +from datetime import datetime, timedelta +import subprocess +import shlex +import shutil +import glob +import fileinput +import ruamel.yaml +from remap_base import remap_base + +class analysis(remap_base): + def __init__(self, **configs): + super().__init__(**configs) + + def remap(self): + config = self.config + bkg = config['output']['analysis']['bkg'] + if ( not bkg ): return + + analysis_in = self.find_analysis() + if len(analysis_in) ==0 : + print("\n There are no analysis files. \n") + return + + print("\n Remapping or copying analysis files...\n") + + cwdir = os.getcwd() + bindir = os.path.dirname(os.path.realpath(__file__)) + in_bcsdir = config['input']['shared']['bcs_dir'] + out_bcsdir = config['output']['shared']['bcs_dir'] + out_dir = config['output']['shared']['out_dir'] + if not os.path.exists(out_dir) : os.makedirs(out_dir) + print( "cd " + out_dir) + os.chdir(out_dir) + + tmpdir = out_dir+'/ana_data/' + if os.path.exists(tmpdir) : subprocess.call(['rm', '-rf',tmpdir]) + print ("mkdir " + tmpdir) + os.makedirs(tmpdir) + + print( "cd " + tmpdir) + os.chdir(tmpdir) + + yyyymmddhh_ = str(config['input']['shared']['yyyymmddhh']) + yyyy_ = yyyymmddhh_[0:4] + mm_ = yyyymmddhh_[4:6] + dd_ = yyyymmddhh_[6:8] + hh_ = yyyymmddhh_[8:10] + rst_time = datetime(year=int(yyyy_), month=int(mm_), day=int(dd_), hour = int(hh_)) + expid_in = config['input']['shared']['expid'] + expid_out = config['output']['shared']['expid'] + if (expid_out) : + expid_out = expid_out + '.'
+ else: + expid_out = '' + + aqua = config['output']['analysis']['aqua'] + local_fs=[] + for f in analysis_in: + print(f) + fname = os.path.basename(f) + out_name = fname.replace(expid_in + '.', expid_out) + f_tmp = tmpdir+'/'+out_name + local_fs.append(f_tmp) + shutil.copy(f,f_tmp) + if out_name.find('satbias') != -1 : + if (aqua): + f_ = open(f_tmp, 'w') + for line in fileinput.input(f): + f_.write(line.replace('airs281SUBSET_aqua', 'airs281_aqua ')) + f_.close() + + nlevel = config['output']['air']['nlevel'] + agrid_out = config['output']['shared']['agrid'] + flags = "-g5 -res " + self.get_grid_kind(agrid_out.upper()) + " -nlevs " + str(nlevel) + bkg_files = glob.glob(tmpdir+'/*.bkg??_eta_rst*') + for f in bkg_files: + f_orig = f + ".orig" + shutil.move(f,f_orig) + cmd = bindir + '/dyn2dyn.x ' + flags + ' -o ' + f + ' ' + f_orig + print(cmd) + subprocess.call(shlex.split(cmd)) + + for f in local_fs: + fname = os.path.basename(f) + shutil.move(f, out_dir+'/'+fname) + # write lcv + lcv = config['output']['analysis']['lcv'] + if lcv : + ymd_ = yyyymmddhh_[0:8] + hh_ = yyyymmddhh_[8:10] + hms_ = hh_+'0000' + rstlcvOut = out_dir+'/'+expid_out+'rst.lcv.'+ymd_+'_'+hh_+'z.bin' + cmd = bindir+'/mkdrstdate.x ' + ymd_ + ' ' + hms_ +' ' + rstlcvOut + print(cmd) + subprocess.call(shlex.split(cmd)) + print( "cd " + cwdir) + os.chdir(cwdir) + + def get_grid_kind(this, grid): + hgrd = {} + hgrd['C12'] = 'a' + hgrd['C24'] = 'a' + hgrd['C48'] = 'b' + hgrd['C90'] = 'c' + hgrd['C180'] = 'd' + hgrd['C360'] = 'd' + hgrd['C500'] = 'd' + hgrd['C720'] = 'e' + hgrd['C1000'] = 'e' + hgrd['C1440'] = 'e' + hgrd['C2000'] = 'e' + hgrd['C2880'] = 'e' + hgrd['C5760'] = 'e' + return hgrd[grid] + + def find_analysis(self): + analysis_in = [] + rst_dir = self.config['input']['shared']['rst_dir'] + bkgs = glob.glob(rst_dir + '/*_eta_rst*') + sfcs = glob.glob(rst_dir + '/*_sfc_rst*') + anasat = glob.glob(rst_dir + '/*ana_satb*') + traks= glob.glob(rst_dir + '/*.trak.GDA.rst*') + analysis_in = bkgs + sfcs + traks + anasat + return list(dict.fromkeys(analysis_in)) + +if __name__ == '__main__' : + ana = analysis(params_file='remap_params.yaml') + ana.remap() diff --git a/GEOS_Util/post/remap_restart/remap_base.py b/GEOS_Util/post/remap_restart/remap_base.py new file mode 100755 index 00000000..2f5a9406 --- /dev/null +++ b/GEOS_Util/post/remap_restart/remap_base.py @@ -0,0 +1,30 @@ +#!/usr/bin/env python3 +# +import os +import ruamel.yaml +import shutil + +class remap_base(object): + def __init__(self, **configs): + for key, value in configs.items(): + if (key == 'params_file'): + print( "use Config yaml file: " + value) + yaml = ruamel.yaml.YAML() + stream ='' + with open(value, 'r') as f: + stream = f.read() + self.config = yaml.load(stream) + out_dir = self.config['output']['shared']['out_dir'] + if not os.path.exists(out_dir) : os.makedirs(out_dir) + f = os.path.basename(value) + dest = out_dir+'/'+f + try: + shutil.copy(value, dest) + except shutil.SameFileError: + pass + if (key == 'config_obj'): + print( "use Config obj") + self.config = value + out_dir = self.config['output']['shared']['out_dir'] + if not os.path.exists(out_dir) : os.makedirs(out_dir) + break diff --git a/GEOS_Util/post/remap_restart/remap_catchANDcn.py b/GEOS_Util/post/remap_restart/remap_catchANDcn.py new file mode 100755 index 00000000..44337f3e --- /dev/null +++ b/GEOS_Util/post/remap_restart/remap_catchANDcn.py @@ -0,0 +1,134 @@ +#!/usr/bin/env python3 +# +import os +import sys +import subprocess +import shutil +import glob +import ruamel.yaml 
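A minimal sketch (illustration only, not part of this diff) of the pattern remap_base supports: the base class parses remap_params.yaml, or accepts an already-parsed mapping via config_obj, and creates output:shared:out_dir; each remapping step subclasses it and does its work in remap(). The subclass below is hypothetical, assuming remap_base.py is importable and a remap_params.yaml is present:

from remap_base import remap_base

class echo_step(remap_base):
    def remap(self):
        # self.config is the parsed remap_params.yaml mapping
        print('new restarts go to', self.config['output']['shared']['out_dir'])

if __name__ == '__main__':
    echo_step(params_file='remap_params.yaml').remap()   # load and copy the YAML, then run
    # or reuse an in-memory config: echo_step(config_obj=parsed_config).remap()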
+import shlex +from remap_base import remap_base + +class catchANDcn(remap_base): + def __init__(self, **configs): + super().__init__(**configs) + + def remap(self): + if not self.config['output']['surface']['remap_catch']: + return + + config = self.config + rst_dir = config['input']['shared']['rst_dir'] + model = config['input']['surface']['catch_model'] + in_rstfile = '' + yyyymmddhh_ = str(self.config['input']['shared']['yyyymmddhh']) + time = yyyymmddhh_[0:8]+'_'+yyyymmddhh_[8:10] + in_rstfiles = glob.glob(rst_dir+'/*'+model+'_*'+time+'*') + if len(in_rstfiles) == 0: + print('\n try catchXX file without time stamp') + in_rstfiles = glob.glob(rst_dir+'/*'+model+'_*') + if len(in_rstfiles) == 0: + return + in_rstfile = in_rstfiles[0] + + print("\nRemapping " + model + ".....\n") + + cwdir = os.getcwd() + bindir = os.path.dirname(os.path.realpath(__file__)) + in_bcsdir = config['input']['shared']['bcs_dir'] + out_bcsdir = config['output']['shared']['bcs_dir'] + out_dir = config['output']['shared']['out_dir'] + expid = config['output']['shared']['expid'] + in_wemin = config['input']['surface']['wemin'] + out_wemin = config['output']['surface']['wemin'] + surflay = config['output']['surface']['surflay'] + in_tilefile = glob.glob(in_bcsdir+ '/*-Pfafstetter.til')[0] + out_tilefile = glob.glob(out_bcsdir+ '/*-Pfafstetter.til')[0] + account = config['slurm']['account'] + # even the input is binary, the output si nc4 + suffix = time+'z.nc4' + + if (expid) : + expid = expid + '.' + else: + expid = '' + suffix = '_rst.' + suffix + out_rstfile = expid + os.path.basename(in_rstfile).split('_rst')[0].split('.')[-1]+suffix + + if not os.path.exists(out_dir) : os.makedirs(out_dir) + print( "cd " + out_dir) + os.chdir(out_dir) + + InData_dir = out_dir+'/InData/' + print ("mkdir -p " + InData_dir) + os.makedirs(InData_dir, exist_ok = True) + + f = os.path.basename(in_rstfile) + dest = InData_dir+'/'+f + # file got copy because the computing node cannot access archive + print('\nCopy ' + in_rstfile + ' to ' +dest) + shutil.copyfile(in_rstfile,dest) + in_rstfile = dest + + log_name = out_dir+'/'+'mk_catchANDcn_log' + mk_catch_j_template = """#!/bin/csh -f +#SBATCH --account={account} +#SBATCH --ntasks=56 +#SBATCH --time=1:00:00 +#SBATCH --job-name=mk_catchANDcn +#SBATCH --qos=debug +#SBATCH --output={log_name} +# + +source {Bin}/g5_modules + +limit stacksize unlimited + +set esma_mpirun_X = ( {Bin}/esma_mpirun -np 56 ) +set mk_catchANDcnRestarts_X = ( {Bin}/mk_catchANDcnRestarts.x ) + +set params = ( -model {model} -time {time} -in_tilefile {in_tilefile} ) +set params = ( $params -out_bcs {out_bcs} -out_tilefile {out_tilefile} -out_dir {out_dir} ) +set params = ( $params -surflay {surflay} -in_wemin {in_wemin} -out_wemin {out_wemin} ) +set params = ( $params -in_rst {in_rstfile} -out_rst {out_rstfile} ) +$esma_mpirun_X $mk_catchANDcnRestarts_X $params + +""" + catch1script = mk_catch_j_template.format(Bin = bindir, account = account, out_bcs = out_bcsdir, \ + model = model, out_dir = out_dir, surflay = surflay, log_name = log_name, \ + in_wemin = in_wemin, out_wemin = out_wemin, out_tilefile = out_tilefile, in_tilefile = in_tilefile, \ + in_rstfile = in_rstfile, out_rstfile = out_rstfile, time = yyyymmddhh_ ) + + script_name = './mk_catchANDcn.j' + + catch_scrpt = open(script_name,'wt') + catch_scrpt.write(catch1script) + catch_scrpt.close() + + interactive = os.getenv('SLURM_JOB_ID', default = None) + if ( interactive ) : + print('interactive mode\n') + ntasks = os.getenv('SLURM_NTASKS', default = None) 
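The job-handling code above, and its counterpart in remap_upper.py later in this diff, reduces to one pattern: fill a csh job template, write it out, then either execute it inside the current interactive SLURM allocation or submit it with sbatch -W and wait. A compact sketch of that pattern, with a hypothetical helper name:

import os
import subprocess

def run_or_submit(script_text, script_name, log_name):
    # write the generated csh job script and make it executable
    with open(script_name, 'w') as f:
        f.write(script_text)
    os.chmod(script_name, 0o755)
    if os.getenv('SLURM_JOB_ID'):
        # already inside a SLURM job: run directly and capture the log
        os.system(script_name + ' 1>' + log_name + ' 2>&1')
    else:
        # not in a job: submit and block until the batch job completes
        subprocess.call(['sbatch', '-W', script_name])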
+ if ( not ntasks): + nnodes = int(os.getenv('SLURM_NNODES', default = '1')) + ncpus = int(os.getenv('SLURM_CPUS_ON_NODE', default = '28')) + ntasks = nnodes * ncpus + ntasks = int(ntasks) + NPE = 56 + if (ntasks < NPE): + print("\nYou should have at least {NPE} cores, but only {ntasks} cores are available. ".format(NPE=NPE, ntasks=ntasks)) + + subprocess.call(['chmod', '755', script_name]) + print(script_name+ ' 1>' + log_name + ' 2>&1') + os.system(script_name + ' 1>' + log_name+ ' 2>&1') + + else: + print("sbatch -W " + script_name +"\n") + subprocess.call(['sbatch','-W', script_name]) + + print( "cd " + cwdir) + os.chdir(cwdir) + +if __name__ == '__main__' : + catch = catchANDcn(params_file='remap_params.yaml') + catch.remap() diff --git a/GEOS_Util/post/remap_restart/remap_lake_landice_saltwater.py b/GEOS_Util/post/remap_restart/remap_lake_landice_saltwater.py new file mode 100755 index 00000000..4d25ae33 --- /dev/null +++ b/GEOS_Util/post/remap_restart/remap_lake_landice_saltwater.py @@ -0,0 +1,169 @@ +#!/usr/bin/env python3 +# +import os +import subprocess +import shutil +import glob +import ruamel.yaml +import shlex +from remap_base import remap_base + +class lake_landice_saltwater(remap_base): + def __init__(self, **configs): + super().__init__(**configs) + + def remap(self): + if not self.config['output']['surface']['remap_water']: + return + + restarts_in = self.find_rst() + if len(restarts_in) == 0: + return + + print("\nRemapping lake, landice, saltwater.....\n") + config = self.config + cwdir = os.getcwd() + bindir = os.path.dirname(os.path.realpath(__file__)) + in_bcsdir = config['input']['shared']['bcs_dir'] + out_bcsdir = config['output']['shared']['bcs_dir'] + out_dir = config['output']['shared']['out_dir'] + + if not os.path.exists(out_dir) : os.makedirs(out_dir) + print( "cd " + out_dir) + os.chdir(out_dir) + + InData_dir = out_dir+'/InData/' + if os.path.exists(InData_dir) : subprocess.call(['rm', '-rf',InData_dir]) + print ("mkdir " + InData_dir) + os.makedirs(InData_dir) + + OutData_dir = out_dir+'/OutData/' + if os.path.exists(OutData_dir) : subprocess.call(['rm', '-rf',OutData_dir]) + print ("mkdir " + OutData_dir) + os.makedirs(OutData_dir) + + types = 'z.bin' + type_str = subprocess.check_output(['file','-b', restarts_in[0]]) + type_str = str(type_str) + if 'Hierarchical' in type_str: + types = 'z.nc4' + yyyymmddhh_ = str(config['input']['shared']['yyyymmddhh']) + suffix = yyyymmddhh_[0:8]+'_'+yyyymmddhh_[8:10]+ types + + saltwater = '' + seaice = '' + landice = '' + lake = '' + route = '' + openwater = '' + for rst in restarts_in: + f = os.path.basename(rst) + dest = InData_dir+'/'+f + if os.path.exists(dest) : os.remove(dest) + print('\nCopy ' + rst + ' to ' +dest) + shutil.copy(rst,dest) + if 'saltwater' in f : saltwater = f + if 'seaice' in f : seaice = f + if 'landice' in f : landice = f + if 'lake' in f : lake = f + if 'route' in f : route = f + if 'openwater' in f : openwater = f + + in_tile_file = glob.glob(in_bcsdir+ '/*-Pfafstetter.til')[0] + out_tile_file = glob.glob(out_bcsdir+ '/*-Pfafstetter.til')[0] + + in_til = InData_dir+'/' + os.path.basename(in_tile_file) + out_til = OutData_dir+'/'+ os.path.basename(out_tile_file) + + if os.path.exists(in_til) : os.remove(in_til) + if os.path.exists(out_til) : os.remove(out_til) + print('\n Copy ' + in_tile_file + ' to ' + in_til) + shutil.copy(in_tile_file, in_til) + print('\n Copy ' + out_tile_file + ' to ' + out_til) + shutil.copy(out_tile_file, out_til) + + exe = bindir +
'/mk_LakeLandiceSaltRestarts.x ' + zoom = config['input']['surface']['zoom'] + + if (saltwater): + cmd = exe + out_til + ' ' + in_til + ' InData/'+ saltwater + ' 0 ' + str(zoom) + print('\n'+cmd) + subprocess.call(shlex.split(cmd)) + + # split Saltwater + if config['output']['surface']['split_saltwater']: + print("\nSplitting Saltwater...\n") + cmd = bindir+'/SaltIntSplitter.x ' + out_til + ' ' + 'OutData/' + saltwater + print('\n'+cmd) + subprocess.call(shlex.split(cmd)) + openwater = '' + seaice = '' + + if (openwater): + cmd = exe + out_til + ' ' + in_til + ' InData/' + openwater + ' 0 ' + str(zoom) + print('\n'+cmd) + subprocess.call(shlex.split(cmd)) + + if (seaice): + cmd = exe + out_til + ' ' + in_til + ' InData/' + seaice + ' 0 ' + str(zoom) + print('\n'+cmd) + subprocess.call(shlex.split(cmd)) + + if (lake): + cmd = exe + out_til + ' ' + in_til + ' InData/' + lake + ' 19 ' + str(zoom) + print('\n'+cmd) + subprocess.call(shlex.split(cmd)) + + if (landice): + cmd = exe + out_til + ' ' + in_til + ' InData/' + landice + ' 20 ' + str(zoom) + print('\n'+cmd) + subprocess.call(shlex.split(cmd)) + + if (route): + route = bindir + '/mk_RouteRestarts.x ' + cmd = route + out_til + ' ' + yyyymmddhh_[0:6] + print('\n'+cmd) + subprocess.call(shlex.split(cmd)) + + expid = config['output']['shared']['expid'] + if (expid) : + expid = expid + '.' + else: + expid = '' + suffix = '_rst.' + suffix + for out_rst in glob.glob("OutData/*_rst*"): + filename = expid + os.path.basename(out_rst).split('_rst')[0].split('.')[-1]+suffix + print('\n Move ' + out_rst + ' to ' + out_dir+"/"+filename) + shutil.move(out_rst, out_dir+"/"+filename) + print('cd ' + cwdir) + os.chdir(cwdir) + + def find_rst(self): + surf_restarts =[ + "route_internal_rst" , + "lake_internal_rst" , + "landice_internal_rst" , + "openwater_internal_rst" , + "saltwater_internal_rst" , + "seaicethermo_internal_rst"] + + rst_dir = self.config['input']['shared']['rst_dir'] + yyyymmddhh_ = str(self.config['input']['shared']['yyyymmddhh']) + time = yyyymmddhh_[0:8]+'_'+yyyymmddhh_[8:10] + restarts_in=[] + for f in surf_restarts : + files = glob.glob(rst_dir+ '/*'+f+'*'+time+'*') + if len(files) >0: + restarts_in.append(files[0]) + if (len(restarts_in) == 0) : + print("\n try restart file names without time stamp\n") + for f in surf_restarts : + fname = rst_dir+ '/'+f + if os.path.exists(fname): + restarts_in.append(fname) + + return restarts_in + +if __name__ == '__main__' : + lls = lake_landice_saltwater(params_file='remap_params.yaml') + lls.remap() diff --git a/GEOS_Util/post/remap_restart/remap_params.py b/GEOS_Util/post/remap_restart/remap_params.py new file mode 100755 index 00000000..cd5ec03c --- /dev/null +++ b/GEOS_Util/post/remap_restart/remap_params.py @@ -0,0 +1,611 @@ +#!/usr/bin/env python3 +# +import os,sys +import ruamel.yaml +import shutil +import glob +import time +import shlex +import subprocess +import questionary +from datetime import datetime +from datetime import timedelta + +class remap_params(object): + def __init__(self, config_from_question): + self.common_in = config_from_question['input']['shared'] + self.common_out = config_from_question['output']['shared'] + self.upper_out = config_from_question['output']['air'] + self.slurm_options = config_from_question['slurm'] + self.surf_in = config_from_question['input']['surface'] + self.surf_out = config_from_question['output']['surface'] + self.ana_out = config_from_question['output']['analysis'] + + self.init_time() + self.init_tags() + self.init_merra2() + + + # load 
input yaml + yaml = ruamel.yaml.YAML() + stream = '' + remap_tpl = os.path.dirname(os.path.realpath(__file__)) + '/remap_params.tpl' + with open(remap_tpl, 'r') as f: + stream = f.read() + config_tpl = yaml.load(stream) + + # params for shared + config_tpl['input']['shared']['agrid'] = self.common_in.get('agrid') + config_tpl['input']['shared']['ogrid'] = self.common_in.get('ogrid') + config_tpl['input']['shared']['rst_dir'] = self.common_in['rst_dir']+'/' + config_tpl['input']['shared']['expid'] = self.common_in.get('expid') + config_tpl['input']['shared']['yyyymmddhh'] = self.common_in['yyyymmddhh'] + + config_tpl['output']['air']['nlevel'] = self.upper_out.get('nlevel') + config_tpl['output']['air']['remap'] = self.upper_out.get('remap') + config_tpl['output']['surface']['remap_water'] = self.surf_out.get('remap') + config_tpl['output']['surface']['remap_catch'] = self.surf_out.get('remap') + config_tpl['output']['shared']['agrid'] = self.common_out['agrid'] + config_tpl['output']['shared']['ogrid'] = self.common_out['ogrid'] + config_tpl['output']['shared']['out_dir'] = self.common_out['out_dir'] + '/' + config_tpl['output']['shared']['expid'] = self.common_out['expid'] + + # params for upper air + config_tpl = self.params_for_air(config_tpl) + config_tpl = self.params_for_surface(config_tpl) + config_tpl = self.params_for_analysis(config_tpl) + config_tpl = self.options_for_slurm(config_tpl) + + # get bc directory and tile file + in_bcsdir = self.get_bcdir("IN") + out_bcsdir = self.get_bcdir("OUT") + config_tpl['input']['shared']['bcs_dir'] = in_bcsdir+ '/' + config_tpl['output']['shared']['bcs_dir'] = out_bcsdir + '/' + + self.config = config_tpl + + def convert_to_yaml(self) : + if os.path.exists('remap_params.yaml') : + overwrite = questionary.confirm("Do you want to overwrite remap_params.yaml file?", default=False).ask() + if not overwrite : + while True: + new_name = questionary.text("What's the backup name for existing remap_params.yaml?", default='remap_params.yaml.1').ask() + if os.path.exists(new_name): + print('\n'+ new_name + ' exists, please enter a new one. \n') + else: + shutil.move('remap_params.yaml', new_name) + break + yaml = ruamel.yaml.YAML() + with open("remap_params.yaml", "w") as f: + yaml.dump(self.config, f) + + def init_tags(self): + # copy and paste from remap.pl + # minor change. 
Add "D" to the number for each group + # BCS Tag: Fortuna-1_4 + F14 = ( 'F14', 'Fortuna-1_4', 'Fortuna-1_4_p1' ) + D214 = ( 'D214', 'GEOSdas-2_1_4', 'GEOSdas-2_1_4-m1', + 'GEOSdas-2_1_4-m2', 'GEOSdas-2_1_4-m3', 'GEOSdas-2_1_4-m4' ) + D540 = ( 'D540', 'GEOSadas-5_4_0', 'GEOSadas-5_4_0_p1', + 'GEOSadas-5_4_0_p2', 'GEOSadas-5_4_0_p3', 'GEOSadas-5_4_0_p4', + 'GEOSadas-5_4_1', 'GEOSadas-5_4_1_p1', 'GEOSadas-5_4_2', + 'GEOSadas-5_4_3', 'GEOSadas-5_4_4', 'GEOSadas-5_5_0', + 'GEOSadas-5_5_1', 'GEOSadas-5_5_2', 'GEOSadas-5_5_3' ) + + # BCS Tag: Fortuna-2_0 + #--------------------- + F20 = ( 'F20', 'Fortuna-2_0') + + # BCS Tag: Fortuna-2_1 + #--------------------- + F21 = ( 'F21', 'Fortuna-2_1', 'Fortuna-2_1_p1', + 'Fortuna-2_1_p2', 'Fortuna-2_1_p3', 'Fortuna-2_2', + 'Fortuna-2_2_p1', 'Fortuna-2_2_p2', 'Fortuna-2_3', + 'Fortuna-2_3_p1', 'Fortuna-2_4', 'Fortuna-2_4_p1', + 'Fortuna-2_4_p2', 'Fortuna-2_5', 'Fortuna-2_5_BETA0', + 'Fortuna-2_5_p1', 'Fortuna-2_5_p2', 'Fortuna-2_5_p3', + 'Fortuna-2_5_p4', 'Fortuna-2_5_p5', 'Fortuna-2_5_p6', + 'Fortuna-2_5_pp2' ) + D561 = ( 'D561', 'GEOSadas-5_6_1', 'GEOSadas-5_6_1_p1', + 'GEOSadas-5_6_1_p2', 'GEOSadas-5_6_1_p3', 'GEOSadas-5_6_1_p4', + 'GEOSadas-5_6_2', 'GEOSadas-5_6_2_p1', 'GEOSadas-5_6_2_p2', + 'GEOSadas-5_6_2_p3', 'GEOSadas-5_6_2_p4', 'GEOSadas-5_6_2_p5', + 'GEOSadas-5_6_2_p6', 'GEOSadas-5_7_1', 'GEOSadas-5_7_1_p1', + 'GEOSadas-5_7_1_p2', 'GEOSadas-5_7_2', 'GEOSadas-5_7_2_p1', + 'GEOSadas-5_7_2_p2', 'GEOSadas-5_7_2_p2_m1','GEOSadas-5_7_2_p3', + 'GEOSadas-5_7_2_p3_m1', 'GEOSadas-5_7_2_p3_m2','GEOSadas-5_7_2_p4', + 'GEOSadas-5_7_2_p5', 'GEOSadas-5_7_2_p5_m1','GEOSadas-5_7_3', + 'GEOSadas-5_7_3_p1', 'GEOSadas-5_7_3_p2', 'GEOSadas-5_7_3_p2' ) + + # BCS Tag: Ganymed-1_0 + #--------------------- + G10 = ( 'G10', 'Ganymed-1_0', 'Ganymed-1_0_BETA', + 'Ganymed-1_0_BETA1', 'Ganymed-1_0_BETA2', 'Ganymed-1_0_BETA3', + 'Ganymed-1_0_BETA4' ) + + D580 = ( 'D580', 'GEOSadas-5_8_0', 'GEOSadas-5_9_0', + 'GEOSadas-5_9_1' ) + + # BCS Tags: Ganymed-1_0_M and Ganymed-1_0_D + #------------------------------------------ + G10p = ( 'G10p', 'Ganymed-1_0_p1', 'Ganymed-1_0_p2', + 'Ganymed-1_0_p3', 'Ganymed-1_0_p4', 'Ganymed-1_0_p5', + 'Ganymed-1_0_p6' ) + + D591p= ( 'D591p', 'GEOSadas-5_9_1_p1', 'GEOSadas-5_9_1_p2', + 'GEOSadas-5_9_1_p3', 'GEOSadas-5_9_1_p4', 'GEOSadas-5_9_1_p5', + 'GEOSadas-5_9_1_p6', 'GEOSadas-5_9_1_p7', 'GEOSadas-5_9_1_p8', + 'GEOSadas-5_9_1_p9' ) + + # BCS Tags: Ganymed-1_0_M and Ganymed-1_0_D w/ new landice rst + #------------------------------------------------------------------------ + G20 = ( 'G20', 'Ganymed-2_0', 'Ganymed-2_1', + 'Ganymed-2_1_p1', 'Ganymed-2_1_p2', 'Ganymed-2_1_p3', + 'Ganymed-2_1_p4', 'Ganymed-2_1_p5', 'Ganymed-2_1_p6' ) + D5A0 = ( 'D5A0', 'GEOSadas-5_10_0', 'GEOSadas-5_10_0_p1' ) + + + # BCS Tags: Ganymed-1_0_Reynolds and Ganymed-1_0_Ostia + #----------------------------------------------------- + G30 = ( 'G30', 'Ganymed-3_0', 'Ganymed-3_0_p1' ) + D5B0 = ( '5B0', 'GEOSadas-5_10_0_p2', 'GEOSadas-5_11_0' ) + + # BCS Tags: Ganymed-4_0_Reynolds, Ganymed-4_0_MERRA-2, and Ganymed-4_0_Ostia + #--------------------------------------------------------------------------- + G40 = ( 'G40', 'Ganymed-4_0', 'Ganymed-4_0_p1', + 'Ganymed-4_1', 'Heracles-1_0', 'Heracles-1_1', + 'Heracles-2_0', 'Heracles-2_1', 'Heracles-3_0', + 'Heracles-4_0', 'Heracles-5_4_p3' ) + D512 = ( '512', 'GEOSadas-5_12_2', 'GEOSadas-5_12_4', + 'GEOSadas-5_12_4_p1', 'GEOSadas-5_12_4_p2', 'GEOSadas-5_12_4_p3', + 'GEOSadas-5_12_5', 'GEOSadas-5_13_0_p1', 'GEOSadas-5_13_0_p2', + 
'GEOSadas-5_13_1', 'GEOSadas-5_16_5' ) + + # BCS Tags: Icarus (New Land Parameters, New Topography) + #--------------------------------------------------------------------------- + ICA = ( 'ICA', 'Icarus', 'Jason' ) + D517 = ( '517', 'GEOSadas-5_17_0', 'GEOSadas-5_17_1', 'GEOSadas-5_18_0', + 'GEOSadas-5_18_1', 'GEOSadas-5_18_2', 'GEOSadas-5_18_3', + 'GEOSadas-5_18_3_p1', 'GEOSadas-5_19_0', 'GEOSadas-5_20_0', + 'GEOSadas-5_20_0_p1', 'GEOSadas-5_20_0_p2', 'GEOSadas-5_21_0', + 'GEOSadas-5_21_2', 'GEOSadas-5_21_3_p1', 'GEOSadas-5_22_0', + 'GEOSadas-5_22_0_p1', 'GEOSadas-5_22_0_p2', 'GEOSadas-5_23_0', + 'GEOSadas-5_23_0_p1', 'GEOSadas-5_24_0', 'GEOSadas-5_24_0_p1' ) + GITOL = ( 'GITOL', '10.3', '10.4', '10.5', + '10.6', '10.7', '10.8', + '10.9', '10.10', '10.11', + '10.12', '10.13', '10.14', + '10.15', '10.16', '10.17', + '10.18' ) + + # BCS Tags: Icarus-NLv3 (New Land Parameters) + #--------------------------------------------------------------------------- + INL = ( 'INL', 'Icarus-NL', 'Icarus-NLv3', 'Jason-NL' ) + GITNL = ( 'GITNL', '10.19', '10.20', '10.21', '10.22', '10.23' ) + D525 = ( '525', 'GEOSadas-5_25_1', 'GEOSadas-5_25_1_p5', 'GEOSadas-5_25_p7', + 'GEOSadas-5_27_1', 'GEOSadas-5_29_3', 'GEOSadas-5_29_4' ) + + self.bcsTag={} + for tag in F14: self.bcsTag[tag]= "Fortuna-1_4" + for tag in F20: self.bcsTag[tag]= "Fortuna-2_0" + for tag in F21: self.bcsTag[tag]= "Fortuna-2_1" + for tag in G10: self.bcsTag[tag]= "Ganymed-1_0" + for tag in G10p: self.bcsTag[tag]= "Ganymed-1_0_M" + for tag in G20: self.bcsTag[tag]= "Ganymed-1_0_M" + for tag in G30: self.bcsTag[tag]= "Ganymed-1_0_Reynolds" + for tag in G40: self.bcsTag[tag]= "Ganymed-4_0_Reynolds" + for tag in ICA: self.bcsTag[tag]= "Icarus_Reynolds" + for tag in GITOL: self.bcsTag[tag]= "Icarus_Reynolds" + for tag in INL: self.bcsTag[tag]= "Icarus-NLv3_Reynolds" + for tag in GITNL: self.bcsTag[tag]= "Icarus-NLv3_Reynolds" + + + for tag in D214: self.bcsTag[tag]= "Fortuna-1_4" + for tag in D540: self.bcsTag[tag]= "Fortuna-1_4" + for tag in D561: self.bcsTag[tag]= "Fortuna-2_1" + for tag in D580: self.bcsTag[tag]= "Ganymed-1_0" + for tag in D591p: self.bcsTag[tag]= "Ganymed-1_0_M" + for tag in D5A0: self.bcsTag[tag]= "Ganymed-1_0_M" + for tag in D5B0: self.bcsTag[tag]= "Ganymed-1_0_Reynolds" + for tag in D512: self.bcsTag[tag]= "Ganymed-4_0_Reynolds" + for tag in D517: self.bcsTag[tag]= "Icarus_Reynolds" + for tag in D525: self.bcsTag[tag]= "Icarus-NLv3_Reynolds" + + self.tagsRank ={} + self.tagsRank['Fortuna-1_4'] = 1 + self.tagsRank['Fortuna-1_5'] = 2 + self.tagsRank['Fortuna-2_0'] = 3 + self.tagsRank['Fortuna-2_1'] = 4 + self.tagsRank['Ganymed-1_0'] = 5 + self.tagsRank['Ganymed-1_0_m1'] = 6 + self.tagsRank['Ganymed-1_0_m2'] = 7 + self.tagsRank['Ganymed-1_0_M'] = 8 + self.tagsRank['Ganymed-1_0_D'] = 9 + self.tagsRank['Ganymed-1_0_Reynolds'] = 10 + self.tagsRank['Ganymed-1_0_Ostia'] = 11 + self.tagsRank['Ganymed-4_0_Reynolds'] = 12 + self.tagsRank['Ganymed-4_0_Ostia'] = 13 + self.tagsRank['Ganymed-4_0_MERRA-2'] = 14 + self.tagsRank['Icarus_Reynolds'] = 15 + self.tagsRank['Icarus_MERRA-2'] = 16 + self.tagsRank['Icarus_Ostia'] = 17 + self.tagsRank['Icarus-NLv3_Reynolds'] = 18 + self.tagsRank['Icarus-NLv3_MERRA-2'] = 19 + self.tagsRank['Icarus-NLv3_Ostia'] = 20 + + self.bcbase={} + self.bcbase['discover_ops'] = "/discover/nobackup/projects/gmao/share/gmao_ops/fvInput/g5gcm/bcs" + self.bcbase['discover_lt'] = "/discover/nobackup/ltakacs/bcs" + self.bcbase['discover_couple'] = "/discover/nobackup/projects/gmao/ssd/aogcm/atmosphere_bcs" + + def 
init_time(self): + ymdh = self.common_in.get('yyyymmddhh') + self.yyyymm = ymdh[0:6] + self.yyyy = ymdh[0:4] + self.mm = ymdh[4:6] + self.dd = ymdh[6:8] + self.hh = ymdh[8:10] + self.ymd = ymdh[0:8] + + def init_merra2(self): + if not self.common_in['MERRA-2']: + return + print("\n MERRA-2 sources:\n") + yyyymm = int(self.yyyymm) + if yyyymm < 197901 : + exit("Error. MERRA-2 data < 1979 not available\n") + elif (yyyymm < 199201): + self.common_in['expid'] = "d5124_m2_jan79" + elif (yyyymm < 200106): + self.common_in['expid'] = "d5124_m2_jan91" + elif (yyyymm < 201101): + self.common_in['expid'] = "d5124_m2_jan00" + else: + self.common_in['expid'] = "d5124_m2_jan10" + + self.common_in['agrid'] = 'C180' + self.common_in['ogrid'] = '1440x720' + self.common_in['bc_base']= 'discover_ops' + self.common_in['tag']= 'Ganymed-4_0' + + def copy_merra2(self): + if not self.common_in['MERRA-2']: + return + print("\n Copy MERRA-2 :\n") + expid = self.common_in['expid'] + yyyymmddhh_ = str(self.common_in['yyyymmddhh']) + surfix = yyyymmddhh_[0:8]+'_'+self.hh+'z.bin' + merra_2_rst_dir = '/archive/users/gmao_ops/MERRA2/gmao_ops/GEOSadas-5_12_4/'+expid +'/rs/Y'+self.yyyy +'/M'+self.mm+'/' + rst_dir = self.common_in['rst_dir'] + '/' + os.makedirs(rst_dir, exist_ok = True) + print(' Copy MERRA-2 Restart \n from \n ' + merra_2_rst_dir + '\n to\n '+ rst_dir +'\n') + + upperin =[merra_2_rst_dir + expid+'.fvcore_internal_rst.' + surfix, + merra_2_rst_dir + expid+'.moist_internal_rst.' + surfix, + merra_2_rst_dir + expid+'.agcm_import_rst.' + surfix, + merra_2_rst_dir + expid+'.gocart_internal_rst.' + surfix, + merra_2_rst_dir + expid+'.pchem_internal_rst.' + surfix ] + + surfin = [ merra_2_rst_dir + expid+'.catch_internal_rst.' + surfix, + merra_2_rst_dir + expid+'.lake_internal_rst.' + surfix, + merra_2_rst_dir + expid+'.landice_internal_rst.' + surfix, + merra_2_rst_dir + expid+'.saltwater_internal_rst.'+ surfix] + + for f in upperin : + fname = os.path.basename(f) + dest = rst_dir + '/'+fname + print("Copy file "+f +" to " + rst_dir) + shutil.copy(f, dest) + + for f in surfin : + fname = os.path.basename(f) + dest = rst_dir + '/'+fname + print("Copy file "+f +" to " + rst_dir) + shutil.copy(f, dest) + + # prepare analysis files + bkg = self.ana_out['bkg'] + if ( not bkg ): return + yyyy_ = yyyymmddhh_[0:4] + mm_ = yyyymmddhh_[4:6] + dd_ = yyyymmddhh_[6:8] + hh_ = yyyymmddhh_[8:10] + rst_time = datetime(year=int(yyyy_), month=int(mm_), day=int(dd_), hour = int(hh_)) + expid_in = self.common_in['expid'] + expid_out = self.common_out['expid'] + if (expid_out) : + expid_out = expid_out + '.' + else: + expid_out = '' + + agrid_in = self.common_in['agrid'] + agrid_out = self.common_out['agrid'] + + anafiles=[] + for h in [3,4,5,6,7,8,9]: + delt = timedelta(hours = h-3) + new_time = rst_time + delt + yyyy = "Y"+str(new_time.year) + mm = 'M%02d'%new_time.month + ymd = '%04d%02d%02d'%(new_time.year,new_time.month, new_time.day) + hh = '%02d'%h + newhh= '%02d'%new_time.hour + m2_rst_dir = merra_2_rst_dir.replace('Y'+yyyy_,yyyy).replace('M'+mm_,mm) + # bkg files + for ftype in ['sfc', 'eta']: + fname = expid_in+'.bkg'+hh+'_'+ftype+'_rst.'+ymd+'_'+newhh+'z.nc4' + f = m2_rst_dir+'/'+fname + if(os.path.isfile(f)): + anafiles.append(f) + else: + print('Warning: Cannot find '+f) + # cbkg file + fname = expid_in + '.cbkg' + hh + '_eta_rst.' 
+ ymd + '_' + newhh + 'z.nc4' + f = m2_rst_dir+'/'+fname + if(os.path.isfile(f)): + anafiles.append(f) + else: + print('Warning: Cannot find '+f) + # gaas_bkg_sfc files + if (h==6 or h==9): + fname = expid_in+'.gaas_bkg_sfc_rst.'+ymd+'_'+newhh+'z.nc4' + f = m2_rst_dir+'/'+fname + if (os.path.isfile(f)): + anafiles.append(f) + else: + print('Warning: Cannot find '+f) + # trak.GDA.rst file + delt = timedelta(hours = 3) + new_time = rst_time - delt + yyyy = "Y"+str(new_time.year) + mm = 'M%02d'%new_time.month + ymdh = '%04d%02d%02d%02d'%(new_time.year, new_time.month, new_time.day, new_time.hour) + m2_rst_dir = merra_2_rst_dir.replace('Y'+yyyy_,yyyy).replace('M'+mm_,mm) + fname = expid_in+'.trak.GDA.rst.'+ymdh+'z.txt' + f = m2_rst_dir+'/'+fname + if (os.path.isfile(f)): anafiles.append(f) + + for f in anafiles: + fname = os.path.basename(f) + f_tmp = rst_dir+'/'+fname + print("Copy file "+f +" to " + rst_dir) + shutil.copy(f,f_tmp) + + def get_bcbase(self, opt): + base = '' + model = '' + + if opt.upper() == 'IN': + model = self.common_in.get('model') + if model == 'MOM6' or model == 'MOM5': + base = 'discover_couple' + else: + base = self.common_in.get('bc_base') + + if opt.upper() == 'OUT': + model = self.common_out.get('model') + if model == 'MOM6' or model == 'MOM5': + base = 'discover_couple' + else: + base = self.common_out.get('bc_base') + assert base, 'please specify bc_base: discover_ops, discover_lt, discover_couple or an absolute path' + if base == 'discover_ops' or base == 'discover_lt' or base=='discover_couple': + return self.bcbase[base] + else: + return base + + def get_bcdir(self, opt): + tag = self.common_in['tag'] + ogrid = self.common_in['ogrid'] + model = self.common_in['model'] + bcdir = self.common_in.get('alt_bcs', None) + if opt.upper() == "OUT": + tag = self.common_out['tag'] + ogrid = self.common_out['ogrid'] + model = self.common_out['model'] + bcdir = self.common_out.get('alt_bcs', None) + + if bcdir is None : + bc_base = self.get_bcbase(opt) + bctag = self.get_bcTag(tag,ogrid) + tagrank = self.tagsRank[bctag] + if (tagrank >= self.tagsRank['Icarus-NLv3_Reynolds']) : + bcdir = bc_base+'/Icarus-NLv3/'+bctag+'/' + if model == 'MOM6' or model == 'MOM5': + bcdir = bc_base+'/Icarus-NLv3/'+model+'/' + elif (tagrank >= self.tagsRank['Icarus_Reynolds']): + if bc_base == self.bcbase['discover_ops']: + bcdir = bc_base+'/Icarus_Updated/'+bctag+'/' + else: + bcdir = bc_base+'/Icarus/'+bctag+'/' + if model == 'MOM6' or model == 'MOM5': + bcdir = bc_base+'/Icarus/'+model+'/' + elif(tagrank >= self.tagsRank["Ganymed-4_0_Reynolds"]): + bcdir = bc_base + '/Ganymed-4_0/'+bctag+'/' + if model == 'MOM6' or model == 'MOM5': + bcdir = bc_base+'/Ganymed/'+model+'/' + else: + bcdir = bc_base + '/' + bctag + '/' + if model == 'MOM6' or model == 'MOM5': + bcdir = bc_base+'/Ganymed/'+model+'/' + + if not os.path.exists(bcdir): + exit("Cannot find bc dir " + bcdir) + + gridStr = self.get_grid_subdir(bcdir,opt) + bcdir = bcdir + '/' + gridStr + + return bcdir + + def get_grid_subdir(self, bcdir, opt): + + def get_name_with_grid( grid, names, a_o): + if not grid : + return names + namex = [] + if (grid[0].upper() == 'C'): + n = int(grid[1:]) + s1 ='{n}x6C'.format(n=n) + j=n*6 + s2 =str(n) + s3 =str(j) + # first try + for aoname in names: + name = '' + if(a_o == 'a'): + name = aoname.split('_')[0] + else: + name = aoname.split('_')[1] + if (name.find(s1) != -1 or (name.find(s2) != -1 and name.find(s3) != -1 )): + namex.append(aoname) + else: + xy = grid.upper().split('X') + s2 = xy[0] + s3 = 
xy[1] + for aoname in names: + name = '' + if(a_o == 'a'): + name = aoname.split('_')[0] + else: + name = aoname.split('_')[1] + if (name.find(s2) != -1 and name.find(s3) != -1): namex.append(aoname) + return namex + #v3.5 + #dirnames = [ f.name for f in os.scandir(bcdir) if f.is_dir()] + #v2.7 + dirnames = [f for f in os.listdir(bcdir) if os.path.isdir(os.path.join(bcdir,f))] + agrid_ = self.common_in['agrid'] + ogrid_ = self.common_in['ogrid'] + if opt.upper() == "OUT" : + agrid_ = self.common_out['agrid'] + ogrid_ = self.common_out['ogrid'] + + anames = get_name_with_grid(agrid_, dirnames, 'a') + gridID = get_name_with_grid(ogrid_, anames, 'o') + if len(gridID) == 0 : + exit("cannot find the grid subdirctory of agrid: " +agrid_+ " and ogrid " + ogrid_ + " under "+ bcdir) + g = '' + if len(gridID) == 1 : g = gridID[0] + if len(gridID) >=2 : + print("find too many grid strings in " + bcdir) + print(" gridIDs found", gridID) + for g_ in gridID: + if g_.count('_') == 1 : + g = g_ + #WY note, found many string in couple model + print(" pick the first directory with only one '_' " + g) + break + return g + + def get_bcTag(self, tag, ogrid): + bctag = self.bcsTag[tag] + if ogrid[0].upper() == "C": + bctag=bctag.replace('_Reynolds','_Ostia') + else: + xy = ogrid.upper().split('X') + x = int(xy[0]) + if x == 1440: bctag=bctag.replace('_Reynolds','_MERRA-2') + if x == 2880: + bctag=bctag.replace('_Reynolds','_Ostia') + bctag=bctag.replace('_M','_D') + return bctag + + def params_for_air(self, config_tpl): + if self.common_in['MERRA-2']: + return config_tpl + # verify agrid + rst_dir = self.common_in['rst_dir'] + '/' + time = self.ymd + '_'+ self.hh + files = glob.glob(rst_dir +'/*fvcore_*'+time+'*') + if len(files) == 0 : + fname_ = rst_dir +'/fvcore_internal_rst' + if os.path.exists(fname_) : + files.append(fname_) + + # get expid + if (len(files) >0) : + fname = os.path.basename(files[0]) + expid = fname.split('fvcore')[0] + config_tpl['input']['shared']['expid'] = expid[0:-1] #remove the last '.' + + agrid_ = self.common_in['agrid'] + if self.common_in['ogrid'] == 'CS' : + config_tpl['input']['shared']['ogrid'] = agrid_ + self.common_in['ogrid'] = agrid_ + + ogrid = config_tpl['input']['shared']['ogrid'] + tagout = self.common_out['tag'] + bctag = self.get_bcTag(tagout, ogrid) + tagrank = self.tagsRank[bctag] + if ( not config_tpl['input']['air']['drymass']) : + config_tpl['input']['air']['drymass'] = 0 + if tagrank >=12 : + config_tpl['input']['air']['drymass'] = 1 + + return config_tpl + + def options_for_slurm(self, config_tpl): + config_tpl['slurm']['account'] = self.slurm_options['account'] + config_tpl['slurm']['qos'] = self.slurm_options['qos'] + config_tpl['slurm']['constraint'] = self.slurm_options['constraint'] + return config_tpl + + def params_for_surface(self, config_tpl): + config_tpl['output']['surface']['surflay'] = 20. + tagout = self.common_out['tag'] + ogrid = self.common_out['ogrid'] + bctag = self.get_bcTag(tagout, ogrid) + tagrank = self.tagsRank[bctag] + if tagrank >=12 : + config_tpl['output']['surface']['surflay'] = 50. 
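Since get_bcTag() above determines both the boundary-condition directory and several surface parameters, a worked example of its ocean-grid adjustment may help. This standalone mirror of the method is for illustration only and assumes a tag whose BCS name ends in '_Reynolds':

def adjust_bctag(bctag, ogrid):
    # cubed-sphere ocean grid ('CS'/'Cxxx') -> OSTIA boundary conditions
    if ogrid[0].upper() == 'C':
        return bctag.replace('_Reynolds', '_Ostia')
    x = int(ogrid.upper().split('X')[0])
    if x == 1440:
        # 1440x720 ocean grid -> MERRA-2 SST boundary conditions
        bctag = bctag.replace('_Reynolds', '_MERRA-2')
    if x == 2880:
        # 2880x1440 ocean grid -> high-resolution OSTIA, and '_M' tags become '_D'
        bctag = bctag.replace('_Reynolds', '_Ostia').replace('_M', '_D')
    return bctag

# e.g. adjust_bctag('Icarus-NLv3_Reynolds', '1440X720') returns 'Icarus-NLv3_MERRA-2'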
+ if tagrank >= self.tagsRank["Icarus_Reynolds"]: + config_tpl['output']['surface']['split_saltwater'] = True + config_tpl['input']['surface']['zoom']= self.surf_in['zoom'] + config_tpl['input']['surface']['wemin']= self.surf_in['wemin'] + config_tpl['output']['surface']['wemin']= self.surf_out['wemout'] + + rst_dir = self.common_in['rst_dir'] + '/' + time = self.ymd + '_'+ self.hh + files = glob.glob(rst_dir +'/*catch_*'+time+'*') + if (len(files)== 0) : + files = glob.glob(rst_dir +'/*catch_*') + + if (len(files) > 0) : + config_tpl['input']['surface']['catch_model'] = 'catch' + + files = glob.glob(rst_dir +'/*catchcnclm40_*'+time+'*') + if (len(files)== 0) : + files = glob.glob(rst_dir +'/*catchcnclm40_*') + + if (len(files) > 0) : + config_tpl['input']['surface']['catch_model'] = 'catchcnclm40' + + files = glob.glob(rst_dir +'/*catchcnclm45_*'+time+'*') + if (len(files)== 0) : + files = glob.glob(rst_dir +'/*catchcnclm45_*') + + if (len(files) > 0) : + config_tpl['input']['surface']['catch_model'] = 'catchcnclm45' + + return config_tpl + + def params_for_analysis(self, config_tpl): + config_tpl['output']['analysis']['lcv'] = self.ana_out.get('lcv') + config_tpl['output']['analysis']['bkg'] = self.ana_out.get('bkg') + + ogrid = self.common_out['ogrid'] + tagout = self.common_out['tag'] + bctag = self.get_bcTag(tagout, ogrid) + tagrank = self.tagsRank[bctag] + if tagrank >= self.tagsRank["Ganymed-4_0_Reynolds"] : + config_tpl['output']['analysis']['aqua'] = True + return config_tpl + +if __name__ == "__main__": + yaml = ruamel.yaml.YAML() + stream ='' + with open("raw_answers.yaml", "r") as f: + stream = f.read() + config = yaml.load(stream) + param = remap_params(config) + param.convert_to_yaml() diff --git a/GEOS_Util/post/remap_restart/remap_params.tpl b/GEOS_Util/post/remap_restart/remap_params.tpl new file mode 100644 index 00000000..d08caee7 --- /dev/null +++ b/GEOS_Util/post/remap_restart/remap_params.tpl @@ -0,0 +1,50 @@ +# +# This template file can be filled with questionary or manually +# +# + +input: + air: + drymass: 1 + hydrostatic: 0 + shared: + agrid: + bcs_dir: + expid: + ogrid: + rst_dir: + yyyymmddhh: + surface: + zoom: + wemin: + # it supports three models: catch, catchcnclm40, catchcnclm45 + catch_model: null + +output: + shared: + agrid: + bcs_dir: + expid: + ogrid: + out_dir: + air: + # remap upper air or not + remap: true + nlevel: + surface: + split_saltwater: false + surflay: 20. 
+ wemin: + # remap lake, saltwater, landicet + remap_water: true + # remap catch(cn) + remap_catch: true + analysis: + bkg: true + aqua: False + lcv: false + +slurm: + account: + qos: + constraint: diff --git a/GEOS_Util/post/remap_restart/remap_questions.py b/GEOS_Util/post/remap_restart/remap_questions.py new file mode 100755 index 00000000..8547bfc0 --- /dev/null +++ b/GEOS_Util/post/remap_restart/remap_questions.py @@ -0,0 +1,369 @@ +#!/usr/bin/env python3 +# +# source install/bin/g5_modules +# +# Newer GEOS code should load a module with GEOSpyD Python3 if not run: +# module load python/GEOSpyD/Min4.10.3_py3.9 +# + +import os +import subprocess +import shlex +import ruamel.yaml +import shutil +import questionary +import glob + +def fvcore_name(x): + ymdh = x['input:shared:yyyymmddhh'] + time = ymdh[0:8] + '_'+ymdh[8:10] + files = glob.glob(x['input:shared:rst_dir']+'/*fvcore_*'+time+'*') + if len(files) ==1 : + fname = files[0] + print('\nFound ' + fname) + return fname + else: + fname = x['input:shared:rst_dir']+'/fvcore_internal_rst' + if os.path.exists(fname): + print('\nFound ' + fname) + return fname + return False + +def tmp_merra2_dir(x): + cmd = 'whoami' + p = subprocess.Popen(shlex.split(cmd), stdout=subprocess.PIPE) + (user, err) = p.communicate() + p_status = p.wait() + print(user) + user = user.decode().split() + tmp_merra2 = '/discover/nobackup/'+user[0]+'/merra2_tmp'+x['input:shared:yyyymmddhh']+'/' + return tmp_merra2 + +def we_default(tag): + default_ = '26' + if tag in ['INL','GITNL', '525'] : default_ = '13' + return default_ + +def zoom_default(x): + zoom_ = '8' + fvcore = fvcore_name(x) + if fvcore : + fvrst = os.path.dirname(os.path.realpath(__file__)) + '/fvrst.x -h ' + cmd = fvrst + fvcore + print(cmd +'\n') + p = subprocess.Popen(shlex.split(cmd), stdout=subprocess.PIPE) + (output, err) = p.communicate() + p_status = p.wait() + ss = output.decode().split() + x['input:shared:agrid'] = "C"+ss[0] # save for air parameter + lat = int(ss[0]) + lon = int(ss[1]) + if (lon != lat*6) : + sys.exit('This is not a cubed-sphere grid fvcore restart. Please contact SI team') + ymdh = x.get('input:shared:yyyymmddhh') + ymdh_ = str(ss[3]) + str(ss[4])[0:2] + if (ymdh_ != ymdh) : + print("Warning: The date in fvcore is different from the date you input\n") + zoom = lat /90.0 + zoom_ = str(int(zoom)) + if zoom < 1 : zoom_ = '1' + if zoom > 8 : zoom_ = '8' + if x['input:shared:MERRA-2'] : + zoom_ = '2' + return zoom_ + +def get_account(): + cmd = 'id -gn' + p = subprocess.Popen(shlex.split(cmd), stdout=subprocess.PIPE) + (accounts, err) = p.communicate() + p_status = p.wait() + accounts = accounts.decode().split() + return accounts[0] + +def ask_questions(): + + questions = [ + { + "type": "confirm", + "name": "input:shared:MERRA-2", + "message": "Would you like to remap archived MERRA-2 restarts?", + "default": False, + }, + { + "type": "path", + "name": "input:shared:rst_dir", + "message": "Enter the directory containing restart files to be remapped:", + "when": lambda x: not x['input:shared:MERRA-2'], + }, + { + "type": "text", + "name": "input:shared:yyyymmddhh", + "message": "From what restart date/time would you like to remap? 
(must be 10 digits: yyyymmddhh)", + "validate": lambda text: len(text)==10 , + }, + { + "type": "path", + "name": "input:shared:rst_dir", + "message": "Enter a directory to which the archived MERRA-2 archive files can be copied: ", + "default": lambda x: tmp_merra2_dir(x), + "when": lambda x: x['input:shared:MERRA-2'], + }, + + { + "type": "path", + "name": "output:shared:out_dir", + "message": "Enter the directory for new restarts:\n" + }, + + { + "type": "text", + "name": "input:shared:agrid", + "message": "Enter input atmospheric grid: \n C12 C180 C1000 \n C24 C360 C1440 \n C48 C500 C2880 \n C90 C720 C5760 \n ", + "default": 'C360', + # if it is merra-2 or has_fvcore, agrid is deduced + "when": lambda x: not x['input:shared:MERRA-2'] and not fvcore_name(x), + }, + + { + "type": "text", + "name": "output:shared:agrid", + "message": "Enter new atmospheric grid: \n C12 C180 C1000 \n C24 C360 C1440 \n C48 C500 C2880 \n C90 C720 C5760 \n ", + "default": 'C360', + }, + + { + "type": "text", + "name": "output:air:nlevel", + "message": "Enter new atmospheric levels: (71 72 91 127 132 137 144 181)", + "default": "72", + }, + + { + "type": "select", + "name": "input:shared:model", + "message": "Select input ocean model:", + "choices": ["data", "MOM5", "MOM6"], + "default": "data", + "when": lambda x: not x['input:shared:MERRA-2'] + }, + + { + "type": "select", + "name": "input:shared:ogrid", + "message": "Input Ocean grid: \n \ + Data Ocean Grids \n \ + ------------------- \n \ + 360X180 (Reynolds) \n \ + 1440X720 (MERRA-2) \n \ + 2880X1440 (OSTIA) \n \ + CS = same as atmospere grid (OSTIA cubed-sphere) \n", + "choices": ['360X180','1440X720','2880X1440','CS'], + "when": lambda x: x.get('input:shared:model') == 'data' and not x['input:shared:MERRA-2'], + }, + + { + "type": "select", + "name": "output:shared:model", + "message": "Select ocean model for new restarts:", + "choices": ["data", "MOM5", "MOM6"], + "default": "data", + }, + { + "type": "select", + "name": "output:shared:ogrid", + "message": "Select new ocean grid:", + "choices": ['360X180','1440X720','2880X1440','CS'], + "when": lambda x: x['output:shared:model'] == 'data', + }, + + { + "type": "select", + "name": "input:shared:ogrid", + "message": "Input ocean grid: \n \ + Coupled Ocean Grids \n \ + ------------------- \n \ + 72X36 \n \ + 360X200 \n \ + 720X410 \n \ + 1440X1080 \n ", + "choices": ['72X36','360X200','720X410','1440X1080'], + "when": lambda x: x.get('input:shared:model') == 'MOM5' or x.get('input:shared:model')== 'MOM6' + }, + { + "type": "select", + "name": "output:shared:ogrid", + "message": "Select new ocean grid: \n \ + Coupled Ocean Grids \n \ + ------------------- \n \ + 72X36 \n \ + 360X200 \n \ + 720X410 \n \ + 1440X1080 \n ", + "choices": ['72X36','360X200','720X410','1440X1080'], + "when": lambda x: x['output:shared:model'] != 'data', + }, + + { + "type": "text", + "name": "input:shared:tag", + "message": "Enter GCM or DAS tag for input: \n \ +Sample GCM tags \n \ +--------------- \n \ +G40 : Ganymed-4_0 ......... Heracles-5_4_p3 \n \ +ICA : Icarus .............. Jason \n \ +GITOL : 10.3 ................ 10.18 \n \ +INL : Icarus-NL ........... Jason-NL \n \ +GITNL : 10.19 ............... 10.23 \n \ +\n \ +Sample DAS tags \n \ +--------------- \n \ +5B0 : GEOSadas-5_10_0_p2 .. GEOSadas-5_11_0 \n \ +512 : GEOSadas-5_12_2 ..... GEOSadas-5_16_5\n \ +517 : GEOSadas-5_17_0 ..... GEOSadas-5_24_0_p1\n \ +525 : GEOSadas-5_25_1 ..... 
GEOSadas-5_29_4\n", + "default": "INL", + "when": lambda x: not x["input:shared:MERRA-2"], + }, + { + "type": "text", + "name": "output:shared:tag", + "message": "Enter GCM or DAS tag for new restarts:", + "default": "INL", + }, + + { + "type": "select", + "name": "input:shared:bc_base", + "message": "Select bcs base \n \ + discover_ops: /discover/nobackup/projects/gmao/share/gmao_ops/fvInput/g5gcm/bcs \n \ + discover_lt: /discover/nobackup/ltakacs/bcs \n \ + discover_couple: /discover/nobackup/projects/gmao/ssd/aogcm/atmosphere_bcs \n", + "choices": ["discover_ops", "discover_lt", "discover_couple", "other"], + "when": lambda x: not x['input:shared:MERRA-2'], + }, + { + "type": "path", + "name": "input:shared:alt_bcs", + "message": "Specify your own bcs absolute path (do not contain grid info) for restarts: \n ", + "when": lambda x: x.get("input:shared:bc_base")=="other", + }, + + { + "type": "select", + "name": "output:shared:bc_base", + "message": "Select bcs base for new restarts:", + "choices": ["discover_ops", "discover_lt", "discover_couple", "other"], + }, + { + "type": "path", + "name": "output:shared:alt_bcs", + "message": "Specify your own bcs path (do not contain grid info) for new restarts: \n ", + "when": lambda x: x.get("output:shared:bc_base")=="other", + }, + + { + "type": "confirm", + "name": "output:air:remap", + "message": "Would you like to remap upper air?", + "default": True, + }, + { + "type": "confirm", + "name": "output:surface:remap", + "message": "Would you like to remap surface?", + "default": True, + }, + { + "type": "confirm", + "name": "output:analysis:bkg", + "message": "Regrid bkg files?", + "default": False, + }, + { + "type": "confirm", + "name": "output:analysis:lcv", + "message": "Write lcv?", + "default": False, + }, + { + "type": "text", + "name": "input:surface:wemin", + "message": "What is value of Wemin?", + "default": lambda x: we_default(x.get('input:shared:tag')) + }, + { + "type": "text", + "name": "output:surface:wemout", + "message": "What is value of Wemout?", + "default": lambda x: we_default(x.get('output:shared:tag')) + }, + { + "type": "text", + "name": "input:surface:zoom", + "message": "What is value of zoom [1-8]?", + "default": lambda x: zoom_default(x) + }, + { + "type": "text", + "name": "output:shared:expid", + "message": "Enter new restarts expid:", + "default": "", + }, + + { + "type": "text", + "name": "slurm:qos", + "message": "qos?", + "default": "debug", + }, + + { + "type": "text", + "name": "slurm:account", + "message": "account?", + "default": get_account(), + }, + { + "type": "select", + "name": "slurm:constraint", + "message": "constraint?", + "choices": ['hasw', 'sky', 'cas'], + }, + ] + answers = questionary.prompt(questions) + if not answers.get('input:shared:model') : + answers['input:shared:model'] = 'data' + answers['input:shared:rst_dir'] = os.path.abspath(answers['input:shared:rst_dir']) + if answers.get('output:shared:ogrid') == 'CS': + answers['output:shared:ogrid'] = answers['output:shared:agrid'] + answers['output:shared:out_dir'] = os.path.abspath(answers['output:shared:out_dir']) + + return answers + +def get_config_from_questionary(): + answers = ask_questions() + config = {} + config['input'] = {} + config['input']['shared'] = {} + config['input']['surface'] = {} + config['output'] = {} + config['output']['shared'] = {} + config['output']['air'] = {} + config['output']['surface'] = {} + config['output']['analysis'] = {} + config['slurm'] = {} + for key, value in answers.items(): + keys = key.split(":") 
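The loop being assembled here undoes the flattening used by the questionary answers, whose keys look like input:shared:agrid. The fixed two- and three-level handling below can also be written generically; a sketch of that fold (not the code in this diff):

def nest_answers(answers):
    config = {}
    for key, value in answers.items():
        keys = key.split(':')              # e.g. ['input', 'shared', 'agrid']
        node = config
        for k in keys[:-1]:
            node = node.setdefault(k, {})  # create intermediate levels on demand
        node[keys[-1]] = value
    return config

# e.g. nest_answers({'input:shared:agrid': 'C360', 'slurm:qos': 'debug'})
# returns {'input': {'shared': {'agrid': 'C360'}}, 'slurm': {'qos': 'debug'}}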
+ if len(keys) == 2: + config[keys[0]][keys[1]] = value + if len(keys) == 3: + config[keys[0]][keys[1]][keys[2]] = value + + return config + +if __name__ == "__main__": + config = get_config_from_questionary() + yaml = ruamel.yaml.YAML() + with open("raw_answers.yaml", "w") as f: + yaml.dump(config, f) + diff --git a/GEOS_Util/post/remap_restart/remap_restarts.py b/GEOS_Util/post/remap_restart/remap_restarts.py new file mode 100644 index 00000000..f91e4af3 --- /dev/null +++ b/GEOS_Util/post/remap_restart/remap_restarts.py @@ -0,0 +1,95 @@ +#!/usr/bin/env python3 +# +# source install/bin/g5_modules +# +# Newer GEOS code should load a module with GEOSpyD Python3 if not run: +# module load python/GEOSpyD/Min4.10.3_py3.9 +# + +import sys, getopt +import ruamel.yaml +import questionary +from remap_questions import get_config_from_questionary +from remap_params import * +from remap_upper import * +from remap_lake_landice_saltwater import * +from remap_analysis import * +from remap_catchANDcn import * + +def main(argv): + config_yaml = '' + try: + opts, args = getopt.getopt(argv,"hc:", ['config_file=']) + except getopt.GetoptError: + print('Usage: remap_restarts.py -c remap_params.yaml or ./remap_restarts.py ') + sys.exit('command line error') + for opt, arg in opts: + if opt == '-h': + print('''\nThere are two ways to use this script to remap restarts. \n + 1) Use an exsiting config file to remap: \n + ./remap_restarts.py -c my_config.yaml \n \n + 2) Use questionary to convert template remap_params.tpl to \n + remap_params.yaml and then remap. \n + ./remap_restarts.py \n + \nHelp message: \n + 1) Each individual script can be executed independently + 2) remap_questions.py generates raw_answer.yaml + 3) remap_params.py uses raw_answer.yaml and remap_params.tpl as inputs and generates remap_params.yaml + 4) remap_upper.py uses remap_params.yaml as input for remapping + 5) remap_lake_landice_saltwater.py uses remap_params.yaml as input for remapping + 6) remap_catchANDcn.py uses remap_params.yaml as input for remapping + 7) remap_analysis.py uses remap_params.yaml as input for remapping ''') + sys.exit(0) + if opt in("-c", "--config_file"): + config_yaml = arg + + params = '' + if config_yaml == '': + config = get_config_from_questionary() + params = remap_params(config) + params.convert_to_yaml() + config_yaml = 'remap_params.yaml' + + with open(config_yaml, 'r') as f: + for line in f.readlines(): + trimmed_line = line.rstrip() + if trimmed_line: # Don't print blank lines + print(trimmed_line) + + print('\n') + questions = [ + { + "type": "confirm", + "name": "Continue", + "message": "Above is the YAML config file, would you like to continue?", + "default": True + },] + answer = questionary.prompt(questions) + + if not answer['Continue'] : + print("\nYou answered not to continue, exiting.\n") + sys.exit(0) + + # copy merra2 files from archives + if params: + params.copy_merra2() + + # upper air + upper = upperair(params_file=config_yaml) + upper.remap() + + # lake, landice and saltwater + lls = lake_landice_saltwater(params_file=config_yaml) + lls.remap() + + # catchANDcn + catch = catchANDcn(params_file=config_yaml) + catch.remap() + + # analysis + ana = analysis(params_file=config_yaml) + ana.remap() + +if __name__ == '__main__' : + main(sys.argv[1:]) + diff --git a/GEOS_Util/post/remap_restart/remap_upper.py b/GEOS_Util/post/remap_restart/remap_upper.py new file mode 100755 index 00000000..090eb7af --- /dev/null +++ b/GEOS_Util/post/remap_restart/remap_upper.py @@ -0,0 +1,272 @@ +#!/usr/bin/env 
python3 +# +import os +import ruamel.yaml +import subprocess +import shlex +import shutil +import glob +from remap_base import remap_base + +class upperair(remap_base): + def __init__(self, **configs): + super().__init__(**configs) + + def remap(self): + if not self.config['output']['air']['remap'] : + return + self.air_restarts =["fvcore_internal_rst" , + "moist_internal_rst" , + "agcm_import_rst" , + "agcm_internal_rst" , + "carma_internal_rst" , + "achem_internal_rst" , + "geoschemchem_internal_rst", + "gmichem_internal_rst" , + "gocart_internal_rst" , + "hemco_internal_rst" , + "mam_internal_rst" , + "matrix_internal_rst" , + "pchem_internal_rst" , + "stratchem_internal_rst" , + "ss_internal_rst" , + "du_internal_rst" , + "cabr_internal_rst" , + "cabc_internal_rst" , + "caoc_internal_rst" , + "ni_internal_rst" , + "su_internal_rst" , + "tr_internal_rst"] + restarts_in = self.find_rst() + if len(restarts_in) == 0: + return + + print( "\nRemapping upper air......\n") + config = self.config + cwdir = os.getcwd() + bindir = os.path.dirname(os.path.realpath(__file__)) + in_bcsdir = config['input']['shared']['bcs_dir'] + out_bcsdir = config['output']['shared']['bcs_dir'] + out_dir = config['output']['shared']['out_dir'] + + if not os.path.exists(out_dir) : os.makedirs(out_dir) + print( "cd " + out_dir) + os.chdir(out_dir) + + tmpdir = out_dir+'/upper_data/' + if os.path.exists(tmpdir) : subprocess.call(['rm', '-rf',tmpdir]) + print ("mkdir " + tmpdir) + os.makedirs(tmpdir) + + print( "cd " + tmpdir) + os.chdir(tmpdir) + + print('\nUpper air restart file names link from "_rst" to "_restart_in" \n') + + types = 'z.bin' + type_str = subprocess.check_output(['file','-b', restarts_in[0]]) + type_str = str(type_str) + if type_str.find('Hierarchical') >=0: + types = 'z.nc4' + yyyymmddhh_ = str(config['input']['shared']['yyyymmddhh']) + suffix = yyyymmddhh_[0:8]+'_'+yyyymmddhh_[8:10]+ types + + for rst in restarts_in : + f = os.path.basename(rst).split('_rst')[0].split('.')[-1]+'_restart_in' + cmd = '/bin/ln -s ' + rst + ' ' + f + print('\n'+cmd) + subprocess.call(shlex.split(cmd)) + + # link topo file + topoin = glob.glob(in_bcsdir+'/topo_DYN_ave*')[0] + cmd = '/bin/ln -s ' + topoin + ' .' 
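One detail from the suffix logic above: the scripts choose between the legacy binary and NetCDF-4 restart naming by running file -b on the first restart and looking for 'Hierarchical' (the HDF5 signature) in its output. A minimal sketch of that check, assuming the restart path exists:

import subprocess

def restart_suffix_type(rst_path):
    # 'file -b' reports 'Hierarchical Data Format (version 5) data' for NetCDF-4/HDF5 restarts
    kind = subprocess.check_output(['file', '-b', rst_path]).decode()
    return 'z.nc4' if 'Hierarchical' in kind else 'z.bin'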
+ print('\n'+cmd) + subprocess.call(shlex.split(cmd)) + + topoout = glob.glob(out_bcsdir+'/topo_DYN_ave*')[0] + cmd = '/bin/ln -s ' + topoout + ' topo_dynave.data' + print('\n'+cmd) + subprocess.call(shlex.split(cmd)) + #fname = os.path.basename(topoout) + #cmd = '/bin/ln -s ' + fname + ' topo_dynave.data' + #print('\n'+cmd) + #subprocess.call(shlex.split(cmd)) + + agrid = config['output']['shared']['agrid'] + if agrid[0].upper() == 'C': + imout = int(agrid[1:]) + else: + exit("Only support cs grid so far") + + if (imout <90): + NPE = 12; nwrit = 1 + elif (imout<=180): + NPE = 24; nwrit = 1 + elif (imout<=500): + NPE = 96; nwrit = 1 + elif (imout==720): + NPE = 192; nwrit = 2 + elif (imout==1000): + NPE = 384; nwrit = 2 + elif (imout==1440): + NPE = 576; nwrit = 2 + elif (imout==2000): + NPE = 768; nwrit = 2 + elif (imout>=2880): + NPE = 5400; nwrit= 6 + + QOS = "#SBATCH --qos="+config['slurm']['qos'] + if NPE > 532: QOS = "###" + QOS + CONSTR = "#SBATCH --constraint=" + config['slurm']['constraint'] + + log_name = out_dir+'/remap_upper_log' + + remap_template="""#!/bin/csh -xf +#!/bin/csh -xf +#SBATCH --account={account} +#SBATCH --time=1:00:00 +#SBATCH --ntasks={NPE} +#SBATCH --job-name=remap_upper +#SBATCH --output={log_name} +{QOS} +{CONSTR} + +unlimit + +cd {out_dir}/upper_data +source {Bin}/g5_modules +/bin/touch input.nml + +# The MERRA fvcore_internal_restarts don't include W or DZ, but we can add them by setting +# HYDROSTATIC = 0 which means HYDROSTATIC = FALSE + +if ($?I_MPI_ROOT) then + # intel scaling suggestions + #-------------------------- + setenv I_MPI_ADJUST_ALLREDUCE 12 + setenv I_MPI_ADJUST_GATHERV 3 + + setenv I_MPI_SHM_HEAP_VSIZE 512 + setenv PSM2_MEMORY large + setenv I_MPI_EXTRA_FILESYSTEM 1 + setenv I_MPI_EXTRA_FILESYSTEM_FORCE gpfs + setenv ROMIO_FSTYPE_FORCE "gpfs:" + +else if ($?MVAPICH2) then + + setenv MV2_ENABLE_AFFINITY 0 + setenv MV2_ENABLE_AFFINITY 0 + setenv SLURM_DISTRIBUTION block + setenv MV2_MPIRUN_TIMEOUT 100 + setenv MV2_GATHERV_SSEND_THRESHOLD 256 + +endif +set infiles = () +set outfils = () +foreach infile ( *_restart_in ) + if ( $infile == fvcore_internal_restart_in ) continue + if ( $infile == moist_internal_restart_in ) continue + + set infiles = ( $infiles $infile ) + set outfil = `echo $infile | sed "s/restart_in/rst_out/"` + set outfils = ($outfils $outfil) +end + +set interp_restartsX = {Bin}/interp_restarts.x +if ( $#infiles ) then + set ioflag = "-input_files $infiles -output_files $outfils" + set ftype = `file -Lb --mime-type fvcore_internal_restart_in` + if ($ftype =~ *stream*) then + set interp_restartsX = {Bin}/interp_restarts_bin.x + endif +else + set ioflag = "" +endif + +set drymassFLG = {drymassFLG} +if ($drymassFLG) then + set dmflag = "" +else + set dmflag = "-scalers F" +endif + +{Bin}/esma_mpirun -np {NPE} $interp_restartsX -im {imout} -lm {nlevel} \\ + -do_hydro {hydrostatic} $ioflag $dmflag -nwriter {nwrit} + +""" + account = config['slurm']['account'] + drymassFLG = config['input']['air']['drymass'] + hydrostatic = config['input']['air']['hydrostatic'] + nlevel = config['output']['air']['nlevel'] + + remap_upper_script = remap_template.format(Bin=bindir, account = account, \ + out_dir = out_dir, log_name = log_name, drymassFLG = drymassFLG, \ + imout = imout, nwrit = nwrit, NPE = NPE, \ + QOS = QOS,CONSTR = CONSTR, nlevel = nlevel, hydrostatic = hydrostatic) + + script_name = './remap_upper.j' + + upper = open(script_name,'wt') + upper.write(remap_upper_script) + upper.close() + + interactive = os.getenv('SLURM_JOB_ID', 
default = None) + + if (interactive) : + print('interactive mode\n') + ntasks = os.getenv('SLURM_NTASKS', default = None) + if ( not ntasks): + nnodes = int(os.getenv('SLURM_NNODES', default = '1')) + ncpus = int(os.getenv('SLURM_CPUS_ON_NODE', default = '28')) + ntasks = nnodes * ncpus + ntasks = int(ntasks) + if (ntasks < NPE ): + print("\nYou should have at least {NPE} cores. Now you only have {ntasks} cores ".format(NPE=NPE, ntasks=ntasks)) + + subprocess.call(['chmod', '755', script_name]) + print(script_name+ ' 1>' + log_name + ' 2>&1') + os.system(script_name + ' 1>' + log_name+ ' 2>&1') + else : + print('sbatch -W '+ script_name +'\n') + subprocess.call(['sbatch', '-W', script_name]) + +# +# post process +# + expid = config['output']['shared']['expid'] + if (expid) : + expid = expid + '.' + else: + expid = '' + suffix = '_rst.' + suffix + for out_rst in glob.glob("*_rst*"): + filename = expid + os.path.basename(out_rst).split('_rst')[0].split('.')[-1]+suffix + print('\n Move ' + out_rst + ' to ' + out_dir+"/"+filename) + shutil.move(out_rst, out_dir+"/"+filename) + + print('\n Move remap_upper.j to ' + out_dir) + shutil.move('remap_upper.j', out_dir+"/remap_upper.j") + print('cd ' + cwdir) + os.chdir(cwdir) + + def find_rst(self): + rst_dir = self.config['input']['shared']['rst_dir'] + yyyymmddhh_ = str(self.config['input']['shared']['yyyymmddhh']) + time = yyyymmddhh_[0:8]+'_'+yyyymmddhh_[8:10] + restarts_in=[] + for f in self.air_restarts : + files = glob.glob(rst_dir+ '/*'+f+'*'+time+'*') + if len(files) >0: + restarts_in.append(files[0]) + if (len(restarts_in) == 0) : + print("\n try restart file names without time stamp\n") + for f in self.air_restarts : + fname = rst_dir+ '/'+f + if os.path.exists(fname): + restarts_in.append(fname) + + return restarts_in + +if __name__ == '__main__' : + air = upperair(params_file='remap_params.yaml') + air.remap()
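Taken together, the workflow these scripts implement is: remap_restarts.py gathers answers interactively (or takes -c remap_params.yaml), remap_params.py merges the answers with remap_params.tpl into remap_params.yaml, and the upperair, lake_landice_saltwater, catchANDcn and analysis classes each consume that YAML. A hedged usage sketch for re-running individual steps from an existing config, assuming the scripts are installed in the GEOS bin directory and g5_modules has been sourced as noted in the script headers:

#!/usr/bin/env python3
# Re-run selected remapping steps from an existing remap_params.yaml,
# mirroring the __main__ blocks of the individual scripts.
from remap_upper import upperair
from remap_catchANDcn import catchANDcn

if __name__ == '__main__':
    upperair(params_file='remap_params.yaml').remap()
    catchANDcn(params_file='remap_params.yaml').remap()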