#combine.py: Script to combine the images from different observing periods.
#Created on 08-02-2016, updated (to Python 3.6) on 30-10-2018.
#Marjorie Decleir
#Note: This script assumes that the largest image covers all other images!
#Import the necessary packages.
import os

import numpy as np
from astropy.io import fits
from astropy import wcs
from ccdproc import wcs_project, CCDData

import configloader

#Load the configuration file.
config = configloader.load_config()

#Specify the galaxy, the path to the working directory and the different years.
galaxy = config['galaxy']
path = config['path'] + galaxy + "/working_dir/"
years = config['years']
enlarge = config['enlarge']
add_x = int(config['add_xpix'])
add_y = int(config['add_ypix'])

#Specify the different filters.
filters = ["uw2","um2","uw1"]

#This is the main function.
def main():
    #PART 1: Convert the units of the corrected images (and their uncertainties) from counts/s to counts.
    #Print user information.
    print("Converting units from counts/s to counts...")

    #Loop over the different years.
    for year in years:
        yearpath = path + year + "/"
        #Loop over the filters.
        for filter in filters:
            #Convert the units of the corrected image.
            if os.path.isfile(yearpath+"sum_"+filter+"_nm_coilsszp.img"):
                convert(yearpath+"sum_"+filter+"_nm_coilsszp.img")

    #PART 2: Reproject the images to one reference image so that they all have the same size.
    #Print user information.
    print("Reprojecting images...")

    #Loop over the filters.
    for filter in filters:
        #Find out which image is the largest (based on 1 axis only) and take that image as the reference image.
        size = 0
        ref_path = None
        for year in years:
            yearpath = path + year + "/"
            if os.path.isfile(yearpath+"sum_"+filter+"_nm_coilsszp_c.img"):
                hdulist = fits.open(yearpath+"sum_"+filter+"_nm_coilsszp_c.img")
                if size < hdulist[0].header['NAXIS1']:
                    size = hdulist[0].header['NAXIS1']
                    ref_path = yearpath

        #Print user information.
        if ref_path is None:
            print("No images were found for filter " + filter + ". Please verify whether this is correct.")
            continue
        else:
            print("Image sum_"+filter+"_nm_coilsszp_c.img of year " + ref_path.split("/")[-2] + " will be used as the reference image. Please verify whether this image is large enough to cover all other images.")

        #Open the reference image.
        ref_image = fits.open(ref_path+"sum_"+filter+"_nm_coilsszp_c.img")
        ref_header = ref_image[0].header

        #-------------------------------------------------------------------------------------------------------------------------------
        #In case the reference image is not large enough to cover all other images:
        #embed the reference image into a larger image by adding NaN values around the image.
        if enlarge == "yes":
            ref_header = embed(ref_header,add_x,add_y)
        #-------------------------------------------------------------------------------------------------------------------------------

        #Create empty lists for the reprojected images.
        repro_skylist = []
        repro_explist = []

        #Loop over the years.
        for year in years:
            yearpath = path + year + "/"
            #Reproject the image (if it is present) to the reference image.
            if os.path.isfile(yearpath+"sum_"+filter+"_nm_coilsszp_c.img"):
                reproject(yearpath+"sum_"+filter+"_nm_coilsszp_c.img",ref_header,repro_skylist,repro_explist)

        #PART 3: Replace all NaNs in the sky images (and uncertainties), and the corresponding pixels in the exposure maps, by 0s.
        #Create arrays of zeros for the images without NaNs.
        new_skylist = np.zeros_like(repro_skylist)
        new_explist = np.zeros_like(repro_explist)

        #Replace the NaNs.
        for i in range(len(repro_skylist)):
            new_skylist[i],new_explist[i] = replaceNaN(repro_skylist[i],repro_explist[i])

        #PART 4: Sum the images of the different years and calculate the Poisson noise.
        #Print user information.
        print("Summing the images of filter " + filter + "...")
        #Sum the sky images (and uncertainties) and the exposure maps.
        sum_datacube, sum_exp_data, header = sum_years(new_skylist,new_explist,filter,ref_path)

        #PART 5: Normalize the total sky image.
        #Print user information.
        print("Normalizing the total sky image of filter " + filter + "...")
        #Normalize the total sky image.
        norm(sum_datacube,sum_exp_data,filter,header)
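
#For each filter, the script writes three files to the working directory (see sum_years() and
#norm() below): "total_sum_<filter>_sk.fits" (the summed datacube), "total_sum_<filter>_ex.fits"
#(the summed exposure map) and "total_sum_<filter>_nm.fits" (the exposure-normalized datacube).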

#Function for PART 1: Unit conversion.
#Function to convert the units of an image from count rates to counts.
def convert(filename):
    #Open the image and the corresponding exposure map.
    hdulist = fits.open(filename)
    data = hdulist[0].data[0]
    header = hdulist[0].header
    exp_hdulist = fits.open(filename.replace("nm_coilsszp","ex"))
    exp_data = exp_hdulist[1].data

    #Convert the units from counts/s to counts.
    new_data = data * exp_data

    #Calculate the coincidence loss correction uncertainty (in counts).
    coicorr_unc = hdulist[0].data[2] * new_data

    #Create a frame with the zero point correction factor.
    f_zp = np.full_like(data,header["ZPCORR"])

    #Adapt the header. Write the converted data and uncertainties to a new image.
    header["PLANE0"] = "primary (counts)"
    header["PLANE2"] = "coincidence loss correction uncertainty (counts)"
    header["PLANE3"] = "zero point correction factor"
    del header['ZPCORR']
    datacube = [new_data,hdulist[0].data[1],coicorr_unc,f_zp]
    new_hdu = fits.PrimaryHDU(datacube,header)
    new_hdu.writeto(filename.replace(".img","_c.img"),overwrite=True)
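
#The converted "_c.img" cube therefore holds four planes, described by the PLANE* header
#keywords set above: 0 = primary image in counts, 2 = coincidence loss correction uncertainty
#in counts, 3 = zero point correction factor. Plane 1 is copied through from the input image
#unchanged; judging from sum_years() below, it holds the coincidence loss correction factor.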

#Functions for PART 2: Reprojection.
#Function to embed an image into a larger image (by updating the header only).
def embed(header,addx,addy):
    #Update the header information.
    header['NAXIS1'] = header['NAXIS1'] + addx*2
    header['NAXIS2'] = header['NAXIS2'] + addy*2
    header['CRPIX1'] = header['CRPIX1'] + addx
    header['CRPIX2'] = header['CRPIX2'] + addy
    return header
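
#Why this works (a note added for clarity, not in the original script): enlarging NAXIS1/NAXIS2
#and shifting CRPIX1/CRPIX2 by the same amount grows the target grid symmetrically around the
#original frame without moving it on the sky. When wcs_project() later resamples each image onto
#this enlarged header, output pixels that fall outside the input image come back as NaN, which
#replaceNaN() then turns into 0s before the summation.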

#Function to reproject an image to a reference image.
def reproject(filename,ref_header,skylist,explist):
    #Open the image.
    hdulist = fits.open(filename)
    datacube = hdulist[0].data
    header = hdulist[0].header

    #Create a list with zeros for the reprojected datacube.
    repro_datacube = [0] * len(datacube)

    #Loop over the different frames in the datacube.
    for i,data in enumerate(datacube):
        #Create a CCDData object.
        data_ccd = CCDData(data,header=header,unit="count",wcs=wcs.WCS(header).celestial)
        #Reproject the data to the reference data.
        repro_datacube[i] = np.asarray(wcs_project(data_ccd,wcs.WCS(ref_header).celestial,target_shape=(ref_header['NAXIS2'],ref_header['NAXIS1'])))

    #Temporary workaround to update the header of the image: write the last reprojected frame
    #to disk and read its header back in.
    new_data = wcs_project(data_ccd,wcs.WCS(ref_header).celestial,target_shape=(ref_header['NAXIS2'],ref_header['NAXIS1']))
    new_data.write(filename.replace(".img","_r.img"),format="fits",overwrite=True)
    temp_hdu = fits.open(filename.replace(".img","_r.img"))
    new_header = temp_hdu[0].header

    #Append the reprojected datacube to the list and write it to a new image.
    skylist.append(np.array(repro_datacube))
    new_hdu = fits.PrimaryHDU(repro_datacube,new_header)
    new_hdu.writeto(filename.replace(".img","_r.img"),overwrite=True)

    #Reproject the exposure map.
    data_exp_ccd = CCDData.read(filename.replace("nm_coilsszp_c","ex"),unit="count",hdu=1)
    repro_data_exp = wcs_project(data_exp_ccd,wcs.WCS(ref_header).celestial,target_shape=(ref_header['NAXIS2'],ref_header['NAXIS1']))

    #Append the reprojected data to a list and write it to a new image.
    explist.append(np.array(repro_data_exp))
    repro_data_exp.write(filename.replace("nm_coilsszp_c","ex_r"),format="fits",overwrite=True)

#Function for PART 3: Replacing NaNs by 0s.
#Function to replace the NaNs in the sky image (and uncertainties), and the corresponding pixels in the exposure map, by 0s.
def replaceNaN(datacube,expdata):
    #Create a mask for the NaNs in the primary frame of the datacube.
    mask = np.isnan(datacube[0])
    #Replace the masked pixels by 0s in all frames of the datacube.
    for data in datacube:
        data[mask] = 0.
    #Replace the masked pixels by 0s in the exposure map.
    expdata[mask] = 0.
    return datacube,expdata
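
#Rationale (added note, not in the original script): pixels that were never observed in a given
#year are NaN after reprojection. Setting both the sky counts and the exposure time to 0 for
#those pixels means they contribute nothing to the sums in sum_years(), and the final division
#by the summed exposure map in norm() automatically uses only the years that actually cover
#each pixel.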

#Function for PART 4: Sum the years and calculate the Poisson noise.
#Function to sum the images (and uncertainties) and exposure maps and to calculate the Poisson noise.
def sum_years(skylist,explist,filter,ref_path):
    #Sum the sky frames.
    sum_datacube = np.zeros_like(skylist[0])
    for datacube in skylist:
        sum_datacube = sum_datacube + datacube

    #Calculate the relative coincidence loss correction uncertainty.
    primary = sum_datacube[0]
    coicorr_rel = sum_datacube[2] / primary

    #Calculate a weighted-average coincidence loss correction factor.
    denom = np.zeros_like(skylist[0][0])
    for datacube in skylist:
        notnan = np.isfinite(datacube[0]/datacube[1])
        denom[notnan] += (datacube[0]/datacube[1])[notnan]
    f_coi = primary / denom

    #Calculate a weighted-average zero point correction factor.
    denom = np.zeros_like(skylist[0][0])
    for datacube in skylist:
        notnan = np.isfinite(datacube[0]/datacube[3])
        denom[notnan] += (datacube[0]/datacube[3])[notnan]
    f_zp = primary / denom

    #Calculate the relative Poisson noise.
    poisson_rel = 1./np.sqrt(primary)

    #Obtain the header of the reference image and adjust it.
    header = fits.open(ref_path+"sum_"+filter+"_nm_coilsszp_c_r.img")[0].header
    for i in range(len(skylist[0])):
        del header["PLANE"+str(i)]
    header["PLANE0"] = "primary (counts)"
    header["PLANE1"] = "average coincidence loss correction factor"
    header["PLANE2"] = "relative coincidence loss correction uncertainty (fraction)"
    header["PLANE3"] = "average zero point correction factor"
    header["PLANE4"] = "relative Poisson noise (fraction)"

    #Write the new datacube to a new image.
    new_datacube = np.array([primary,f_coi,coicorr_rel,f_zp,poisson_rel])
    sum_hdu = fits.PrimaryHDU(new_datacube,header)
    sum_hdu.writeto(path+"total_sum_"+filter+"_sk.fits",overwrite=True)

    #Sum the exposure maps.
    sum_exp_data = np.zeros_like(explist[0])
    for data in explist:
        sum_exp_data = sum_exp_data + data

    #Write the summed exposure map to a new image.
    exp_header = fits.open(ref_path+"sum_"+filter+"_ex_r.img")[0].header
    sum_exp_hdu = fits.PrimaryHDU(sum_exp_data,exp_header)
    sum_exp_hdu.writeto(path+"total_sum_"+filter+"_ex.fits",overwrite=True)

    return new_datacube,sum_exp_data,header
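
#How the combination works (an explanatory note added here; the formula simply restates the
#loops above): with C_i the counts of year i and f_i its correction factor (coincidence loss
#or zero point), the combined factor is
#    f = sum_i(C_i) / sum_i(C_i / f_i),
#i.e. the ratio of the total corrected counts to the total counts with the correction undone,
#and the relative Poisson noise of the summed image is 1 / sqrt(sum_i(C_i)).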

#Function for PART 5: Normalizing the total sky image.
#Function to normalize an image.
def norm(datacube,exp_data,filter,header):
    #Normalize the image.
    datacube[0] = datacube[0]/exp_data
    #Adjust the header.
    header["PLANE0"] = "primary (counts/s)"
    #Save the normalized image.
    norm_hdu = fits.PrimaryHDU(datacube,header)
    norm_hdu.writeto(path+"total_sum_"+filter+"_nm.fits",overwrite=True)


if __name__ == '__main__':
    main()
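
#Typical invocation (an assumption based on the __main__ guard and the configloader import,
#not documented in the file itself): run "python combine.py" from a location where configloader
#can find the configuration file defining galaxy, path, years, enlarge, add_xpix and add_ypix.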