
Commit

chore(ml): adjust hyper-parameters
DerYeger committed Oct 9, 2024
1 parent e93d7a4 commit 9fb7ca3
Showing 4 changed files with 4 additions and 5 deletions.
3 changes: 1 addition & 2 deletions ml/gnn/src/gnn.py
@@ -27,7 +27,7 @@
 
 num_epochs = 2000
 start_epoch = 0
-patience = 5
+patience = 10
 
 layout = Layout()
 layout.split_column(Layout(name="datasets"), Layout(name="models"))
@@ -60,7 +60,6 @@
     test_dataset.num_classes,
 )
 
-num_uml_types = 193
 num_node_features = train_dataset.num_features
 num_edge_features = train_dataset.num_edge_features
 hidden_channels = max_num_classes * 2
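Note on the `patience` change (5 → 10): this value conventionally bounds how many epochs training may continue without improvement on the validation metric before early stopping aborts the run. The training loop itself is not part of this diff, so the following is only a minimal sketch of such a counter, reusing `num_epochs` and `start_epoch` from the hunk above; `train_one_epoch`, `validate`, and `best_loss` are assumed, illustrative names.

# Minimal early-stopping sketch; not the repository's code, helper names are illustrative.
patience = 10
best_loss = float("inf")
epochs_without_improvement = 0

for epoch in range(start_epoch, num_epochs):
    train_one_epoch(model)      # assumed helper: one optimization pass over the training set
    val_loss = validate(model)  # assumed helper: loss on the validation set
    if val_loss < best_loss:
        best_loss = val_loss
        epochs_without_improvement = 0
    else:
        epochs_without_improvement += 1
        if epochs_without_improvement >= patience:
            break  # no improvement for `patience` consecutive epochs

Doubling the value simply lets training tolerate a longer plateau before it is stopped.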
2 changes: 1 addition & 1 deletion ml/gnn/src/model/gat.py
@@ -24,7 +24,7 @@ def __init__(
             heads=heads,
         )
         self.activation = ReLU()
-        self.dropout = Dropout(0.1)
+        self.dropout = Dropout(0.2)
         self.classifier = GATConv(
             in_channels=hidden_channels * heads,
             out_channels=out_channels,
2 changes: 1 addition & 1 deletion ml/gnn/src/model/gcn.py
@@ -17,7 +17,7 @@ def __init__(
         super(GCNModel, self).__init__("GCN", layout=layout)
         self.embed = GCNConv(num_node_features, hidden_channels)
         self.activation = ReLU()
-        self.dropout = Dropout(0.1)
+        self.dropout = Dropout(0.2)
         self.classifier = GCNConv(hidden_channels, out_channels)
         self.optimizer = torch.optim.Adam(self.parameters(), lr=0.01)
         self.criterion = torch.nn.CrossEntropyLoss()
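Note on the dropout changes: both the GAT and GCN models raise their dropout rate from 0.1 to 0.2. Based on the layers visible in these hunks (embedding convolution → ReLU → Dropout → classifier convolution), the dropout presumably sits between the hidden representation and the classifier. The class below is only a sketch under that assumption, not code from this repository.

import torch
from torch.nn import Dropout, ReLU
from torch_geometric.nn import GCNConv

class GCNSketch(torch.nn.Module):
    # Illustrative only: mirrors the layers visible in the gcn.py hunk above.
    def __init__(self, num_node_features, hidden_channels, out_channels):
        super().__init__()
        self.embed = GCNConv(num_node_features, hidden_channels)
        self.activation = ReLU()
        self.dropout = Dropout(0.2)  # raised from 0.1 in this commit
        self.classifier = GCNConv(hidden_channels, out_channels)

    def forward(self, x, edge_index):
        x = self.embed(x, edge_index)
        x = self.activation(x)
        x = self.dropout(x)  # only active in training mode (model.train())
        return self.classifier(x, edge_index)

A higher rate zeroes a larger fraction of hidden features during training, i.e. stronger regularization, which fits with the longer early-stopping patience set in the same commit.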
2 changes: 1 addition & 1 deletion ml/tree-lstm/src/paper/mdeoperation.py
@@ -384,7 +384,7 @@ class Args:
         "no_attention": False,
         "no_pf": False,
         "no_train": False,
-        "patience": 5,
+        "patience": 10,
     }
 )

