-
Notifications
You must be signed in to change notification settings - Fork 2
/
Ensemble.py
70 lines (64 loc) · 3.08 KB
/
Ensemble.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
import keras
from keras.preprocessing import text,sequence
from keras.layers import Input, BatchNormalization, Softmax
from keras.layers.core import *
from keras.models import *
from keras.callbacks import Callback,LambdaCallback
from keras.optimizers import *
import keras.backend as K
from hyperopt import fmin, tpe, hp, STATUS_OK, Trials
from LearnUtil import *
def model(params, withbiofeature=True, cnn_trainable=False, rnn_trainable=False, load_weight=False):
    """Build the ensemble network that fuses pretrained CNN and RNN branches.

    Parameters
    ----------
    params : dict
        Hyperparameter dict; must contain 'cnn_load_file', 'rnn_load_file',
        the 'bio_fc_*' MLP settings, and (when load_weight is True)
        'ensemble_load_file'.
    withbiofeature : bool
        When True, an MLP embedding of the 11 biological features is
        concatenated with the CNN/RNN outputs before the final weighting layer.
    cnn_trainable, rnn_trainable : bool
        Whether the loaded sub-models are fine-tuned during ensemble training.
    load_weight : bool
        When True, restore previously saved ensemble weights.

    Returns
    -------
    keras.models.Model
        Model taking [onehot_input (21,4,1), bio_input (11,)] and producing a
        single scalar score. NOTE(review): bio_input is declared as a model
        input even when withbiofeature is False — preserved from the original.
    """
    onehot_input = Input(name='onehot_input', shape=(21, 4, 1,))
    biological_input = Input(name='bio_input', shape=(11,))
    # Load the pretrained branches and freeze/unfreeze them per the flags.
    cnnmodel = load_model(params['cnn_load_file'])
    rnnmodel = load_model(params['rnn_load_file'])
    cnnmodel.trainable = cnn_trainable
    rnnmodel.trainable = rnn_trainable
    x_cnn = cnnmodel(onehot_input)
    x_rnn = rnnmodel(onehot_input)
    x = keras.layers.concatenate([x_rnn, x_cnn])
    if withbiofeature:
        ######Biofeat######
        # Embed the handcrafted biological features via a small MLP.
        x_bio = mlp(biological_input,
                    output_layer_activation='tanh', output_dim=1, output_use_bias=True,
                    hidden_layer_num=params['bio_fc_hidden_layer_num'],
                    hidden_layer_units_num=params['bio_fc_hidden_layer_units_num'],
                    hidden_layer_activation='relu', dropout=params['bio_fc_dropout'],
                    name='biofeat_embedding')
        output = keras.layers.concatenate([x, x_bio])
    else:
        output = x
    # Final learned weighted average of the branch scores (no bias).
    output = Dense(units=1, kernel_initializer=keras.initializers.RandomNormal(mean=0.4, stddev=0.05),
                   use_bias=False, bias_initializer='zero', name='last_weight_avg')(output)
    model = Model(inputs=[onehot_input, biological_input],
                  outputs=[output])
    # Fix: the original duplicated this load-and-return block twice in a row;
    # load the saved weights once and return a single time.
    if load_weight:
        model.load_weights(params['ensemble_load_file'])
    return model
def train(params,
          train_input, train_biofeat, train_label,
          test_input, test_biofeat, test_label, withbiofeature,
          cnn_trainable=False, rnn_trainable=False, load_weight=False, issave=True):
    """Train the ensemble model and report a hyperopt-compatible loss.

    Parameters
    ----------
    params : dict
        Hyperparameters: 'train_batch_size', 'train_base_learning_rate',
        'train_epochs_num', 'ensemble_save_file', plus everything model() needs.
    train_input, train_biofeat, train_label : array-like
        Training one-hot sequences, biological features, and targets.
    test_input, test_biofeat, test_label : array-like
        Held-out data scored after every epoch via get_score_at_test.
    withbiofeature : bool
        Forwarded to model(); include the biological-feature branch.
    cnn_trainable, rnn_trainable, load_weight : bool
        Forwarded to model().
    issave : bool
        Whether get_score_at_test persists the best model to
        params['ensemble_save_file'].

    Returns
    -------
    dict
        {'loss': -best_score, 'status': STATUS_OK} for hyperopt's fmin
        (loss is negated because hyperopt minimizes).
    """
    result = Result()
    m = model(params, withbiofeature=withbiofeature,
              cnn_trainable=cnn_trainable, rnn_trainable=rnn_trainable, load_weight=load_weight)
    batch_size = params['train_batch_size']
    learningrate = params['train_base_learning_rate']
    epochs = params['train_epochs_num']
    m.compile(loss='mse', optimizer=Adam(lr=learningrate))
    # Fix: this fires on_epoch_end, but the original named the callback
    # `batch_end_callback` with a lambda arg `batch` — renamed for accuracy.
    # It evaluates on the test set after every epoch and (optionally) saves
    # the best model so far via get_score_at_test.
    epoch_end_callback = LambdaCallback(
        on_epoch_end=lambda epoch, logs:
            print(get_score_at_test(m, [test_input, test_biofeat], result, test_label,
                                    issave=issave, savepath=params['ensemble_save_file'])))
    m.fit([train_input, train_biofeat], train_label,
          batch_size=batch_size,
          epochs=epochs,
          verbose=2,
          validation_split=0.1,
          callbacks=[epoch_end_callback])
    # Inspect the learned ensemble weighting of the branches.
    weight = m.get_layer('last_weight_avg').get_weights()
    print(weight)
    return {'loss': -1 * result.Best, 'status': STATUS_OK}