#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Created on Sat Jun 29 08:58:09 2019
@author: jiguangli
"""
# Lab Source Spectrum Smoothing Using AFS Algorithm
# Load essential packages
import pandas as pd
import numpy as np
import alphashape
import shapely
from rpy2.robjects import r
import rpy2.robjects as robjects
from pathlib import Path
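# Note: rpy2 requires a local R installation; loess() comes from R's built-in stats package.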
# Input variables:
# order: the order of the raw lab source spectrum to smooth. It is an n by 2 matrix,
# where n is the number of pixels. Each row is the wavelength and intensity at
# each pixel.
# a: the parameter a should be a number between 3 and 12. It determines the value
# of alpha in calculating the alpha shape, which is defined as the range of wavelength
# divided by a. The default value of a is 6.
# q: the parameter q; the upper q quantile of points within each window is used to fit
# a local polynomial model.
# d: the smoothing parameter for local polynomial regression, which is the
# proportion of neighboring points to be used when fitting at one point.
# qs: the parameter q_s mentioned in the paper. The upper q_s quantile is used
# in the stop criterion of the iteration.
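# A minimal usage sketch (the file name below is a hypothetical placeholder; only the
# column order of the input matters, since LSS overwrites the column names itself):
#     order = pd.read_csv("lab_source_order.csv").iloc[:, 0:2]  # wavelength, intensity
#     smoothed = LSS(order, a=6, q=0.95, d=0.25, qs=0.97)
# smoothed is a DataFrame with columns "wv" and "intens" holding the smoothed spectrum.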
# Define the LSS function
def LSS(order, a=6, q=0.95, d=0.25, qs=0.97):
    # The default values of q, d, and qs are 0.95, 0.25, and 0.97.
    # Change the column names and format of the dataset.
    order.columns = ["wv", "intens"]
    # n records the number of pixels.
    n = order.shape[0]
    # Use a variable called wavelength to save the original wavelength data.
    wavelength = order["wv"].values
    ref = order["wv"]
    # Variable u is the parameter u in step 1 of the AFS algorithm. It scales the intensity vector.
    u = (ref.max() - ref.min()) / 10 / order["intens"].max()
    order["intens"] = order["intens"] * u
    # This chunk of code excludes the spikes in the raw lab source spectrum.
    # After this chunk of code, the number of pixels in variable order will decrease.
    Q_qs = np.quantile(np.abs(order["intens"].values[1:] - order["intens"].values[0:(n - 1)]), qs)
    Q_99 = np.quantile(np.abs(order["intens"].values[1:] - order["intens"].values[0:(n - 1)]), 0.99)
    while Q_99 > Q_qs:
        temp_vec = np.abs(order["intens"].values[1:] - order["intens"].values[0:(n - 1)])
        # Indices of pixels whose intensity jump exceeds the 0.99 quantile, shifted by one
        # because temp_vec[j] is the difference between pixels j and j+1.
        mask = np.array([j for j, diff in enumerate(temp_vec) if diff > Q_99])
        mask = mask + 1
        order.drop(mask, inplace=True)
        order.reset_index(drop=True, inplace=True)
        n = order.shape[0]
        Q_99 = np.quantile(np.abs(order["intens"].values[1:] - order["intens"].values[0:(n - 1)]), 0.99)
    # Refresh ref after the spike removal so that the vertex lookups below use the
    # current (re-indexed) wavelengths rather than the pre-removal ones.
    ref = order["wv"]
    # Let alpha be 1/a of the wavelength range of the whole order (1/6 by default).
    alpha = (order["wv"].max() - order["wv"].min()) / a
    # This chunk of code detects loops in the boundary of the alpha shape.
    # Usually there is only one loop (polygon).
    # Variable loops is a list.
    # The indices of the k-th loop are recorded in the k-th element of variable loops.
    loops = []
    # Variable points is a list of all the sample points (lambda_i, y_i).
    points = [(order["wv"][i], order["intens"][i]) for i in range(order.shape[0])]
    alpha_shape = alphashape.alphashape(points, 1 / alpha)
    # Input variable:
    # polygon: a shapely polygon object
    # Return variable:
    # a list recording the indices of the vertices of the polygon
    def find_vertices(polygon):
        coordinates = list(polygon.exterior.coords)
        return [ref[ref == coordinates[i][0]].index[0] for i in range(len(coordinates))]
    # If alpha_shape is just a polygon, there is only one loop.
    # If alpha_shape is a multi-polygon, we iterate over it and find all the loops.
    if isinstance(alpha_shape, shapely.geometry.polygon.Polygon):
        temp = find_vertices(alpha_shape)
        loops.append(temp)
    else:
        # Iterate over .geoms (direct iteration over a MultiPolygon is removed in shapely 2.0).
        for polygon in alpha_shape.geoms:
            temp = find_vertices(polygon)
            loops.append(temp)
    # Use the loops to get the set W_alpha.
    # Variable Wa is a vector recording the indices of points in W_alpha.
    Wa = [0]
    for loop in loops:
        temp = loop[:-1]
        temp = [i for i in temp if i < n - 1]
        max_k = max(temp)
        min_k = min(temp)
        len_k = len(temp)
        as_k = temp
        if not (as_k[0] == min_k and as_k[len_k - 1] == max_k):
            index_max = as_k.index(max_k)
            index_min = as_k.index(min_k)
            if index_min < index_max:
                as_k = as_k[index_min:(index_max + 1)]
            else:
                as_k = as_k[index_min:] + as_k[0:(index_max + 1)]
        Wa = Wa + as_k
    Wa.sort()
    Wa = Wa[1:]
    # AS is an n by 2 matrix recording tilde(AS_alpha). Each row is the wavelength and intensity of one pixel.
    AS = order.copy()
    for i in range(n - 1):
        indices = [m for m, v in enumerate(Wa) if v > i]
        if len(indices) != 0:
            index = indices[0]
            # Linearly interpolate between the two neighboring alpha-shape vertices.
            idx_a = Wa[index - 1]
            idx_b = Wa[index]
            AS.loc[i, "intens"] = AS["intens"][idx_a] + (AS["intens"][idx_b] - AS["intens"][idx_a]) * ((AS["wv"][i] - AS["wv"][idx_a]) / (AS["wv"][idx_b] - AS["wv"][idx_a]))
        else:
            # AS=AS.drop(list(range(i, n)))
            break
    # Run a local polynomial on tilde(AS_alpha), as described in step 3 of the AFS algorithm.
    # Use R's loess() (via rpy2) to fit a second-order local polynomial.
    # Variable B1 is the predicted output at the input x.
    x = AS["wv"].values
    y = AS["intens"].values
    # Convert x and y to R vectors.
    x = robjects.FloatVector(list(x))
    y = robjects.FloatVector(list(y))
    df = robjects.DataFrame({"x": x, "y": y})
    # Run loess (no way found to specify the "control" parameters through rpy2).
    loess_fit = r.loess("y ~ x", data=df, degree=2, span=d, surface="direct")
    B1 = r.predict(loess_fit, x)
    # Add a new column called select to the matrix order.
    # order["select"] records hat(y^(1)).
    select = order["intens"].values / B1
    order["select"] = select
    # Convert the indices in Wa into small windows.
    # Each row of the variable window is a pair of neighboring indices in Wa.
    window = np.column_stack((Wa[0:len(Wa) - 1], Wa[1:]))
    # This chunk of code selects the top q quantile of points in each window.
    # The point indices are recorded in variable index, which is S_alpha,q in step 4
    # of the AFS algorithm.
    index = [0]
    for i in range(window.shape[0]):
        loc_window = window[i, :]
        temp = order.loc[loc_window[0]:loc_window[1]]
        index_i = temp[temp["select"] >= np.quantile(temp["select"], q)].index
        index = index + list(index_i)
    index = np.unique(index[1:])
    index = np.sort(index)
    # Run loess a second (final) time on the selected points.
    x_2 = order.iloc[index]["wv"].values
    y_2 = order.iloc[index]["intens"].values
    x_2 = robjects.FloatVector(list(x_2))
    y_2 = robjects.FloatVector(list(y_2))
    df2 = robjects.DataFrame({"x_2": x_2, "y_2": y_2})
    loess_fit2 = r.loess("y_2 ~ x_2", data=df2, degree=2, span=d, surface="direct")
    # Predict on the original wavelength grid and undo the scaling by u.
    r_wavelength = robjects.FloatVector(list(wavelength))
    y_final = r.predict(loess_fit2, r_wavelength)
    result = np.array(y_final) / u
    # Return the smoothed lab source spectrum.
    df = pd.DataFrame({"wv": wavelength, "intens": result})
    return df
# Input variables:
# directory: a string representing the directory containing an order
# name: a string representing the file name. The file can be in either csv or fits format.
# The first column and the second column must be wavelength and intensity, respectively.
# a: the parameter a should be a number between 3 and 12. It determines the value
# of alpha in calculating the alpha shape, which is defined as the range of wavelength
# divided by a. The default value of a is 6.
# q: the parameter q; the upper q quantile of points within each window is used to fit
# a local polynomial model.
# d: the smoothing parameter for local polynomial regression, which is the
# proportion of neighboring points to be used when fitting at one point.
# qs: the parameter q_s mentioned in the paper. The upper q_s quantile is used
# in the stop criterion of the iteration.
def LSS_d(directory, name, a=6, q=0.95, d=0.25, qs=0.97):
    path = directory + "/" + name
    p = Path(path)
    if p.exists():
        if name[-4:] == ".csv":
            csv = pd.read_csv(path, sep=',')
            data = csv.iloc[:, 0:2]
            result = LSS(data, a, q, d, qs)
            return result
        elif name[-5:] == ".fits":
            from astropy.table import Table
            data_fits = Table.read(path, format='fits')
            data_fits = data_fits[data_fits.colnames[0], data_fits.colnames[1]]
            data = data_fits.to_pandas()
            result = LSS(data, a, q, d, qs)
            return result
        else:
            raise Exception("The format of this file is neither csv nor fits")
    else:
        raise Exception("The directory or file doesn't exist")