#!/usr/bin/env python3
# coding: utf-8
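## Choose the Qt5Agg backend and silence matplotlib warnings up front,
## before the pipeline modules below are imported.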
import matplotlib
matplotlib.use('Qt5Agg')
import warnings
warnings.filterwarnings("ignore", module="matplotlib")
import os
import sys
import pickle as pkl
from collections import OrderedDict
from pyM2FS.pyM2FS_funcs import digest_filenumbers,boolify,read_io_config,\
read_obs_config,get_steps,make_mtlz_wrapper
from pyM2FS.FieldData import FieldData
from pyM2FS.pyM2FS_io import FileManager
from pyM2FS.instrument import InstrumentState
import configparser
def pipeline(maskname=None,obs_config_name=None,io_config_name=None, pipe_config_name='./configs/pipeline.ini'):
"""
Call the pipeline
inputs: maskname=None
obs_config_name=None (becomes 'obs_{}.ini'.format(maskname) if None)
io_config_name=None (becomes 'io_{}.ini'.format(maskname) if None)
pipe_config_name='pipeline_config.ini'
*
- If maskname specified, the filenames of obs_* and io_* must have the format
mentioned above
- If just filenames specified, maskname will be taken from config files
- If both maskname and filenames are specified, it is up to you to ensure they
are consistent
* If pipeline_config_name is not specified, the default of "pipeline.ini"
should be present
"""
if '/configs' not in pipe_config_name and not os.path.exists(pipe_config_name):
pipe_config_name = './configs/{}'.format(pipe_config_name)
pipe_config = configparser.ConfigParser(interpolation=configparser.ExtendedInterpolation())
pipe_config.read(pipe_config_name)
if maskname is not None:
pipe_config['GENERAL']['mask_name'] = maskname
    else:
        try:
            maskname = pipe_config['GENERAL']['mask_name']
        except KeyError:
            raise IOError("I don't know the necessary configuration file information. Exiting")
## Check that the configs are there or defined in the pipeline conf file
## read in the confs if they are there, otherwise return none
obs_config = read_obs_config(obs_config_name, pipe_config['CONFS'], maskname)
io_config = read_io_config(io_config_name, pipe_config, maskname)
#### Beginning of Code
## Ingest steps and determine where to start
steps, start, pipe_config = get_steps(pipe_config)
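    ## 'steps' maps step names to boolean-like flags from the config (the loop below
    ## boolifies each one), and 'start' names the step the pipeline resumes from.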
## Interpret the filenumbers specified in the configuration files
str_filenumbers = OrderedDict(obs_config['FILENUMBERS'])
obs_config.remove_section('FILENUMBERS')
filenumbers = digest_filenumbers(str_filenumbers)
## Load the filemanager and instrument status based on the configuration files
filemanager = FileManager( io_config )
instrument = InstrumentState( obs_config )
obj_info = obs_config['TARGET']
## Get specific pipeline options
pipe_options = dict(pipe_config['PIPE_OPTIONS'])
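    ## If requested, build the merged target list (MTL) from the first science exposure,
    ## but only once wavelength-calibration outputs already exist on disk.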
if boolify(pipe_options['make_mtl']) and \
io_config['SPECIALFILES']['mtl'].lower() != 'none' and \
os.path.exists(filemanager.directory.dirname_dict['wavecalib']['write']) and \
len(os.listdir(filemanager.directory.dirname_dict['wavecalib']['write']))>0:
from pyM2FS.create_merged_target_list import make_mtl
make_mtl(io_config,filenumbers['science'][0],vizier_catalogs=['sdss12'], \
overwrite_field=False, overwrite_redshifts = False)
## Load the data and instantiate the pipeline functions within the data class
data = FieldData(filenumbers, filemanager=filemanager, instrument=instrument,
startstep=start, pipeline_options=pipe_options,obj_info=obj_info)
## For all steps marked true, run those steps on the data
for step,do_this_step in steps.items():
do_step_bool = boolify(do_this_step)
if not do_step_bool:
print("\nSkipping {}".format(step))
continue
## Get ready for the requested step
print("\nPerforming {}:".format(step))
data.proceed_to(step=step)
## Check that the currently loaded data is relevant to the
## requested step. If not, it will raise an error
data.check_data_ready_for_current_step()#step=step)
## run the step
data.run_step()#step=step)
## cosmic ray step autosaves during the process
## for other steps, save the results
        if step not in ['cr_remove','wave_calib']:
            try:
                data.write_all_filedata()#step=step)
            except Exception:
                outfile = os.path.join(io_config['PATHS']['data_product_loc'], io_config['FILETEMPLATES']['pickled_datadump'])
                print("Save data failed to complete. Dumping data to {}".format(outfile))
                with open(outfile,'wb') as crashsave:
                    pkl.dump(data.all_hdus,crashsave)
                raise
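    ## After the step loop, optionally build the MTL with redshifts (mtlz) once the
    ## redshift-fit (zfit) outputs exist on disk; note that 'step' and 'do_step_bool'
    ## here carry the values from the final loop iteration.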
if boolify(pipe_options['make_mtlz']) and ((io_config['SPECIALFILES']['mtlz'].lower()) != 'none') and \
os.path.exists(filemanager.directory.dirname_dict['zfit']['write']) and \
len(os.listdir(filemanager.directory.dirname_dict['zfit']['write'])) > 0:
cams = instrument.cameras
make_mtlz_wrapper(data, filemanager, io_config, step, do_step_bool, pipe_options, cams)
else:
return
def parse_command_line(argv):
from optparse import OptionParser
parser = OptionParser()
parser.add_option("-m", "--maskname",
action="store", type="string", dest="maskname")
parser.add_option("-i", "--iofile",
action="store", type="string", dest="io_config_name")
parser.add_option("-o", "--obsfile",
action="store", type="string", dest="obs_config_name")
parser.add_option("-p", "--pipefile",
action="store", type="string", dest="pipe_config_name")
if argv is None:
(options, args) = parser.parse_args()
else:
(options, args) = parser.parse_args(argv)
return options.__dict__
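# Example command-line invocations (a sketch; 'A02' is a hypothetical mask name):
#     python quickreduce.py -m A02
#     python quickreduce.py -o obs_A02.ini -i io_A02.ini -p pipeline.ini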
if __name__ == '__main__':
if len(sys.argv)>1:
print("Detected input parameters: ",sys.argv[1:])
        input_variables = parse_command_line(sys.argv[1:])
nonvals = []
for key,val in input_variables.items():
if val is None:
nonvals.append(key)
for key in nonvals:
input_variables.pop(key)
print("Received input variables: ", input_variables)
else:
input_variables = {}
pipeline(**input_variables)