model.py
import tensorflow as tf


class ASR(tf.keras.Model):
    """
    End-to-end ASR model.

    The model consists of a 1D convolutional layer, followed by three
    bidirectional LSTM layers, followed by three fully connected layers
    applied at each timestep. The final fully connected layer has no
    activation and outputs per-timestep logits.

    This is a bare-bones architecture; experiment with your own
    architectures to get a good WER.
    """
    def __init__(
        self,
        filters,
        kernel_size,
        conv_stride,
        conv_border,
        n_lstm_units,
        n_dense_units,
        _out_units_,
    ):
        super(ASR, self).__init__()
        # 1D convolution over the input feature sequence.
        self.conv_layer = tf.keras.layers.Conv1D(
            filters,
            kernel_size,
            strides=conv_stride,
            padding=conv_border,
            activation="relu",
        )
        # Three stacked bidirectional LSTM layers; each returns the full
        # sequence so the next layer sees every timestep.
        self.blstm_layer1 = tf.keras.layers.Bidirectional(
            tf.keras.layers.LSTM(
                n_lstm_units, return_sequences=True, dropout=0.35, activation="tanh"
            )
        )
        self.blstm_layer2 = tf.keras.layers.Bidirectional(
            tf.keras.layers.LSTM(
                n_lstm_units, return_sequences=True, dropout=0.35, activation="tanh"
            )
        )
        self.blstm_layer3 = tf.keras.layers.Bidirectional(
            tf.keras.layers.LSTM(
                n_lstm_units, return_sequences=True, dropout=0.35, activation="tanh"
            )
        )
        # Per-timestep dense head; the last layer has no activation and
        # outputs _out_units_ logits per timestep.
        self.dense_layer1 = tf.keras.layers.Dense(n_dense_units, activation="relu")
        self.dense_layer2 = tf.keras.layers.Dense(n_dense_units, activation="relu")
        self.dense_layer3 = tf.keras.layers.Dense(_out_units_)
    def call(self, x):
        """Runs the forward pass: conv -> 3x BiLSTM -> 3x dense."""
        x = self.conv_layer(x)    # (batch, reduced_time, filters)
        x = self.blstm_layer1(x)  # (batch, reduced_time, 2 * n_lstm_units)
        x = self.blstm_layer2(x)
        x = self.blstm_layer3(x)
        x = self.dense_layer1(x)  # (batch, reduced_time, n_dense_units)
        x = self.dense_layer2(x)
        x = self.dense_layer3(x)  # (batch, reduced_time, _out_units_)
        return x
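

if __name__ == "__main__":
    # Minimal usage sketch. The input feature size and every hyperparameter
    # value below are illustrative assumptions, not this project's actual
    # training configuration.
    model = ASR(
        filters=200,
        kernel_size=11,
        conv_stride=2,
        conv_border="valid",
        n_lstm_units=200,
        n_dense_units=400,
        _out_units_=29,  # e.g. 26 letters + space + apostrophe + a blank token
    )

    # Dummy batch: 4 utterances, 300 frames, 128 features per frame.
    dummy_batch = tf.random.normal((4, 300, 128))
    logits = model(dummy_batch)
    print(logits.shape)  # (4, 145, 29): time reduced by the strided convolution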