# coding=utf-8
import os
import math
import numpy as np
import tensorflow as tf
from commonModelFunc import *
from tensorflow.contrib import rnn


class BiLSTM(CommonModelFunc):
  def __init__(self, FLAGS, insDataPro, insCNNModel):
    self.FLAGS = FLAGS
    self.insDataPro = insDataPro
    self.insCNNModel = insCNNModel

  # Build the graph for the bidirectional LSTM
  def getBiLSTM(self):
    # Network parameters
    num4Input = self.FLAGS.embeddingDimension  # Same as the output size from hROIPooling
    timeStep = self.insDataPro.num4Features4Instance  # Number of features for each instance
    num4HiddenUnits4LSTM = self.FLAGS.num4HiddenUnits4LSTM  # Dimensionality of the hidden output
    num4Classes = self.FLAGS.num4Classes  # Number of ATC classes
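
    # Note: rnn.static_bidirectional_rnn expects a length-timeStep list of
    # tensors, each of shape [batchSize, num4Input]; it is assumed here that
    # insCNNModel.output4FixedSize4LSTM already has that format.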

    # ===== LSTM layer =====
    with tf.variable_scope("lstmLayer"):
      # yLabel for classification
      self.yLabel4Classification = tf.placeholder(
          tf.float32,
          [None, num4Classes],
          name = "yLabel4Classification")

      name4W, name4B = "output4LSTMW", "output4LSTMB"
      name4Z, name4H = "output4LSTMZ", "output4LSTMH"
      name4Hiddens = "finalState4LSTM"
      outputW4LSTM = self.init_weight_variable(
          name4W,
          [2 * num4HiddenUnits4LSTM, num4Classes])
      outputB4LSTM = self.init_bias_variable(
          name4B,
          [num4Classes])

      # Define the LSTM cells: one forward cell and one backward cell
      lstmFwCell = rnn.BasicLSTMCell(num4HiddenUnits4LSTM, forget_bias = 1.0)
      lstmBwCell = rnn.BasicLSTMCell(num4HiddenUnits4LSTM, forget_bias = 1.0)

      # Get the LSTM cell outputs
      try:
        self.hiddenOutputs, _, _ = rnn.static_bidirectional_rnn(
            lstmFwCell,
            lstmBwCell,
            self.insCNNModel.output4FixedSize4LSTM,
            dtype = tf.float32)
      except Exception:  # Older TensorFlow versions return only the outputs, not the final states
        self.hiddenOutputs = rnn.static_bidirectional_rnn(
            lstmFwCell,
            lstmBwCell,
            self.insCNNModel.output4FixedSize4LSTM,
            dtype = tf.float32)
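
      # hiddenOutputs is a list with one tensor per time step, each of shape
      # [batchSize, 2 * num4HiddenUnits4LSTM]; only the final time step feeds
      # the classification layer below.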

      #self.finalState = self.init_weight_variable("name", [1, 256])
      #self.finalState4LSTM = tf.assign(
      #    self.finalState,
      #    self.hiddenOutputs[-1],
      #    name = name4Hiddens)

      self.outputZ4LSTM = tf.add(
          tf.matmul(
              self.hiddenOutputs[-1],
              outputW4LSTM),
          outputB4LSTM,
          name = name4Z)
      self.outputH4LSTM = tf.nn.sigmoid(self.outputZ4LSTM, name = name4H)
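      # outputH4LSTM holds per-class sigmoid probabilities, matching the
      # sigmoid cross-entropy loss defined below.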

    # ===== Loss layer for LSTM =====
    with tf.variable_scope("loss4ClassificationLayer"):
      name4Loss4Classification = "loss4Classification"

      self.loss4Classification = tf.reduce_mean(
          tf.nn.sigmoid_cross_entropy_with_logits(
              logits = self.outputZ4LSTM,
              labels = self.yLabel4Classification),
          name = name4Loss4Classification)
      self.trainStep = tf.train.AdamOptimizer(
          self.FLAGS.learningRate).minimize(self.loss4Classification)
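

# A minimal usage sketch, assuming FLAGS, insDataPro, and insCNNModel are
# built elsewhere, that insCNNModel exposes output4FixedSize4LSTM, that
# CommonModelFunc provides init_weight_variable and init_bias_variable, and
# that the feed_dict also covers whatever placeholders insCNNModel defines:
#
#   insBiLSTM = BiLSTM(FLAGS, insDataPro, insCNNModel)
#   insBiLSTM.getBiLSTM()
#   with tf.Session() as sess:
#     sess.run(tf.global_variables_initializer())
#     _, loss = sess.run(
#         [insBiLSTM.trainStep, insBiLSTM.loss4Classification],
#         feed_dict = {insBiLSTM.yLabel4Classification: yLabelBatch, ...})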