CNN_LSTM_train.py
# Fine-tuned CNN with Bidirectional LSTM and a single output
import numpy as np
from keras import Model
from keras.callbacks import ReduceLROnPlateau, EarlyStopping, TensorBoard
from keras.layers import LSTM, Bidirectional, TimeDistributed, Dense
from keras.models import load_model
from modules.utils import read_from_file
from modules.DataGenerator import DataGenerator
from TCN import lrcn

if __name__ == '__main__':
    """
    Training the CNN-BidirectionalLSTM in an end-to-end manner
    """
    batch_size = 8
    n_classes = 7        # seven surgical phases in Cholec80
    n_nodes = 200        # LSTM units per direction
    n_timesteps = 30     # frames per input clip
    n_epochs = 200

    local_train_pair = '/Users/seanxiang/data/cholec80/train_labels/labels.txt'
    local_vali_pair = '/Users/seanxiang/data/cholec80/vali_labels/labels.txt'
    local_model_path = '/Users/seanxiang/data/trained/'
    remote_train_pair = '/home/cxia8134/data/old_labels/1-41.txt'
    remote_vali_pair = '/home/cxia8134/data/old_labels/41-51.txt'
    remote_model_path = '/home/cxia8134/dev/baseline/trained/'
    model_name = 'baseline_1.h5'

    # shrink the learning rate when validation loss plateaus, and stop
    # training early once it stops improving altogether
    lr_reducer = ReduceLROnPlateau(factor=np.sqrt(0.1), cooldown=0, patience=5, min_lr=0.5e-6, mode='auto')
    early_stopper = EarlyStopping(monitor='val_loss', min_delta=1e-6, patience=10)
    tensor_board = TensorBoard('log/' + model_name)

    # load the training and validation label files
    train_pair = read_from_file(local_train_pair)
    vali_pair = read_from_file(local_vali_pair)

    train_generator = DataGenerator(train_pair, n_classes, batch_size)
    vali_generator = DataGenerator(vali_pair, n_classes, batch_size)
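    # For reference, a minimal sketch of what DataGenerator is assumed to
    # implement: a keras.utils.Sequence yielding (clip, one-hot label) batches.
    # The actual loading and preprocessing live in modules/DataGenerator.py;
    # the pair layout and the load_clip helper below are illustrative
    # assumptions, not the real implementation.
    #
    # from keras.utils import Sequence, to_categorical
    #
    # class DataGeneratorSketch(Sequence):
    #     def __init__(self, pairs, n_classes, batch_size):
    #         self.pairs, self.n_classes, self.batch_size = pairs, n_classes, batch_size
    #
    #     def __len__(self):
    #         # number of batches per epoch
    #         return int(np.ceil(len(self.pairs) / self.batch_size))
    #
    #     def __getitem__(self, idx):
    #         batch = self.pairs[idx * self.batch_size:(idx + 1) * self.batch_size]
    #         X = np.stack([load_clip(path) for path, _ in batch])   # hypothetical clip loader
    #         y = to_categorical([label for _, label in batch], self.n_classes)
    #         return X, y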
    # # Alternative: build the model from a separately fine-tuned ResNet50
    # # instead of training end-to-end.
    #
    # # load the fine-tuned CNN
    # cnn = load_model(local_model_path + model_name)
    #
    # # remove the last softmax layer to expose per-frame features
    # features_model = Model(inputs=cnn.input, outputs=cnn.layers[-2].output)
    #
    # # add a Bidirectional LSTM over the frame features
    # x = Bidirectional(LSTM(n_nodes,
    #                        dropout=0.25,
    #                        recurrent_dropout=0.25,
    #                        return_sequences=True))(features_model.output)
    #
    # # per-timestep softmax over the phase classes
    # outputs = TimeDistributed(Dense(n_classes, activation='softmax'))(x)
    #
    # lstm_model = Model(inputs=features_model.input, outputs=outputs)
    # lstm_model.summary()
    #
    # lstm_model.compile(loss='categorical_crossentropy',
    #                    optimizer='adam',
    #                    metrics=['accuracy'])
    # each sample is a clip of n_timesteps frames of 224x224 with 4 channels
    # (note: an RGB-only pipeline would use 3 channels here)
    input_shape = (n_timesteps, 224, 224, 4)
    model = lrcn(input_shape=input_shape,
                 n_classes=n_classes)
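    # A sketch of the kind of model lrcn() is assumed to return (the real
    # definition lives in the TCN module): a TimeDistributed CNN feature
    # extractor followed by a Bidirectional LSTM and a single clip-level
    # softmax output, matching the "single output" in the header comment.
    # The layer choices below are illustrative assumptions, not the actual code.
    #
    # from keras.applications import ResNet50
    # from keras.layers import Input
    #
    # def lrcn_sketch(input_shape, n_classes):
    #     frames = Input(shape=input_shape)
    #     cnn = ResNet50(include_top=False, weights=None,
    #                    input_shape=input_shape[1:], pooling='avg')
    #     x = TimeDistributed(cnn)(frames)                   # per-frame features
    #     x = Bidirectional(LSTM(n_nodes, dropout=0.25))(x)  # temporal model, last step only
    #     out = Dense(n_classes, activation='softmax')(x)    # one prediction per clip
    #     m = Model(frames, out)
    #     m.compile(loss='categorical_crossentropy', optimizer='adam',
    #               metrics=['accuracy'])
    #     return m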
    # fit_generator matches the Keras 2.x API used throughout this script
    model.fit_generator(generator=train_generator,
                        validation_data=vali_generator,
                        epochs=n_epochs,
                        verbose=1,
                        workers=6,
                        use_multiprocessing=True,
                        callbacks=[lr_reducer, early_stopper, tensor_board])
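    # The trained weights are never persisted above even though model_name and
    # the *_model_path variables are defined; a save step along these lines
    # (path choice is an assumption) would make the run reusable:
    #
    # model.save(local_model_path + model_name)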