Add Feedforward layer into THP model
iLampard committed Sep 16, 2024
1 parent a2e254d · commit 01551fb
Showing 2 changed files with 16 additions and 3 deletions.
easy_tpp/model/torch_model/torch_baselayer.py (5 additions, 1 deletion)

@@ -89,7 +89,11 @@ def forward(self, x, mask):
             else:
                 return x
         else:
-            return self.self_attn(x, x, x, mask)
+            x = self.self_attn(x, x, x, mask)
+            if self.feed_forward is not None:
+                return self.feed_forward(x)
+            else:
+                return x
 
 
 class TimePositionalEncoding(nn.Module):
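For readers skimming the diff: before this commit the non-residual branch returned the self-attention output directly; now it optionally routes that output through a feed-forward block. Below is a minimal, self-contained sketch of that branch only. The class name EncoderLayerSketch, the mask-ignoring attention stand-in, and the toy dimensions are illustrative assumptions, not code from the repository.

import torch
import torch.nn as nn

class EncoderLayerSketch(nn.Module):
    """Illustrative stand-in for the non-residual path of easy_tpp's EncoderLayer."""

    def __init__(self, self_attn, feed_forward=None):
        super().__init__()
        self.self_attn = self_attn          # e.g. a multi-head attention module
        self.feed_forward = feed_forward    # optional position-wise MLP

    def forward(self, x, mask):
        # Self-attention over the event embeddings (query = key = value = x).
        x = self.self_attn(x, x, x, mask)
        # New in this commit: pass the attention output through the feed-forward
        # block when one is provided; otherwise behave exactly as before.
        if self.feed_forward is not None:
            return self.feed_forward(x)
        return x

# Toy usage: a mask-ignoring attention stand-in, just to show the data flow.
attn = lambda q, k, v, mask: v
ffn = nn.Sequential(nn.Linear(32, 64), nn.ReLU(), nn.Linear(64, 32))
out = EncoderLayerSketch(attn, ffn)(torch.randn(4, 10, 32), mask=None)  # (batch, seq, d_model)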
easy_tpp/model/torch_model/torch_thp.py (11 additions, 2 deletions)

@@ -27,22 +27,31 @@ def __init__(self, model_config):
 
         self.layer_temporal_encoding = TimePositionalEncoding(self.d_model, device=self.device)
 
-        self.factor_intensity_base = torch.empty([1, self.num_event_types], device=self.device)
-        self.factor_intensity_decay = torch.empty([1, self.num_event_types], device=self.device)
+        self.factor_intensity_base = nn.Parameter(torch.empty([1, self.num_event_types], device=self.device))
+        self.factor_intensity_decay = nn.Parameter(torch.empty([1, self.num_event_types], device=self.device))
         nn.init.xavier_normal_(self.factor_intensity_base)
         nn.init.xavier_normal_(self.factor_intensity_decay)
 
         # convert hidden vectors into event-type-sized vector
         self.layer_intensity_hidden = nn.Linear(self.d_model, self.num_event_types)
         self.softplus = nn.Softplus()
 
+        # Add MLP layer
+        # Equation (5)
+        self.feed_forward = nn.Sequential(
+            nn.Linear(self.d_model, self.d_model * 2),
+            nn.ReLU(),
+            nn.Linear(self.d_model * 2, self.d_model)
+        )
+
         self.stack_layers = nn.ModuleList(
             [EncoderLayer(
                 self.d_model,
                 MultiHeadAttention(self.n_head, self.d_model, self.d_model, self.dropout,
                                    output_linear=False),
 
                 use_residual=False,
+                feed_forward=self.feed_forward,
                 dropout=self.dropout
             ) for _ in range(self.n_layers)])
 
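Two points stand out in the torch_thp.py hunk. First, factor_intensity_base and factor_intensity_decay are now wrapped in nn.Parameter, so they are registered with the module, updated by the optimizer, and saved in the state dict; a bare torch.empty attribute is invisible to parameters(). Second, a single two-layer position-wise MLP (the commit's comment points to Equation (5)) is built once and passed to every stacked EncoderLayer, so the same feed-forward module is shared across layers. A small sketch of both points under assumed toy sizes (d_model = 32, num_event_types = 5); ToyTHPHead and plain_base are hypothetical names used only for illustration.

import torch
import torch.nn as nn

d_model, num_event_types = 32, 5   # assumed toy sizes, not the repo defaults

class ToyTHPHead(nn.Module):
    def __init__(self):
        super().__init__()
        # Bare tensor: NOT registered, so it is never trained or checkpointed.
        self.plain_base = torch.empty([1, num_event_types])
        # nn.Parameter (what the commit switches to): registered and trainable.
        self.factor_intensity_base = nn.Parameter(torch.empty([1, num_event_types]))
        nn.init.xavier_normal_(self.factor_intensity_base)

        # Position-wise feed-forward block with the same shape as the one added
        # in the commit: d_model -> 2*d_model -> d_model with a ReLU in between.
        self.feed_forward = nn.Sequential(
            nn.Linear(d_model, d_model * 2),
            nn.ReLU(),
            nn.Linear(d_model * 2, d_model),
        )

m = ToyTHPHead()
print("plain_base" in m.state_dict())             # False: plain tensors are not saved
print("factor_intensity_base" in m.state_dict())  # True: Parameters are saved
print(sum(p.numel() for p in m.parameters()))     # counts the Parameter and the MLP only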
