-
Notifications
You must be signed in to change notification settings - Fork 5
/
config.py
115 lines (77 loc) · 4.71 KB
/
config.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
from keras.optimizers import Adam, SGD, RMSprop
class Parameters(object):
    """Container for all DeepTract configuration parameters.

    Builds a single ``self.params`` dict holding model-architecture,
    training, data, and tracking settings. Path-like entries default to
    ``None`` and must be filled in by the user before running
    (previously these lines had no right-hand side at all, which was a
    SyntaxError and made the module unimportable).
    """
    def __init__(self):
        self.params = dict()

        """ Model Parameters """
        # layers_size - (list) Number of neurons in each layer.
        self.params['layers_size'] = [1000, 1000, 1000]
        # use_dropout - (bool) Use dropout if "True".
        self.params['use_dropout'] = True
        # dropout_prob - (float) Dropout deletion probability (applies when use_dropout=True).
        self.params['dropout_prob'] = 0.3
        # model_name - (string) The model's name (used when saving weights file).
        self.params['model_name'] = 'DeepTract'
        # model_weights_save_dir - (string) Path for saving the model's files after training is done.
        self.params['model_weights_save_dir'] = None  # TODO: enter path here

        """ Training Parameters """
        # learning_rate - (float) Initial learning rate in training phase.
        self.params['learning_rate'] = 1e-4
        # optimizer - (keras.optimizers) Optimizer class to be used in training (Adam/SGD/RMSprop).
        self.params['optimizer'] = Adam
        # batch_size - (int) Data batch size for training.
        self.params['batch_size'] = 8
        # epochs - (int) Number of training epochs.
        self.params['epochs'] = 30
        # decay_LR - (bool) Whether to use learning rate decay.
        self.params['decay_LR'] = True
        # decay_LR_patience - (int) Number of training epochs to wait in case validation performance does not improve
        # before learning rate decay is applied.
        self.params['decay_LR_patience'] = 2
        # decay_factor - (float [0, 1]) In an LR decay step, the existing LR will be multiplied by this factor.
        self.params['decay_factor'] = 0.6
        # early_stopping - (bool) Whether to use early stopping.
        self.params['early_stopping'] = True
        # early_stopping_patience - (int) Number of epochs to wait before training is terminated when validation
        # performance does not improve.
        self.params['early_stopping_patience'] = 5
        # save_checkpoints - (bool) Whether to save model checkpoints during training.
        self.params['save_checkpoints'] = True

        """ Data Parameters """
        # DWI_path - (string) Path to the input DWI directory (should include .nii, .bvecs and .bvals files).
        self.params['DWI_path'] = None  # TODO: enter path here
        # tractogram_path - (string) Path to a tractogram (.trk file) to be used as training labels.
        # NOTE: only relevant for training, not for tracking stage.
        self.params['tractogram_path'] = None  # TODO: enter path here
        # train_val_ratio - (float [0, 1]) Training/Validation split ratio for training.
        # NOTE: only relevant for training, not for tracking stage.
        self.params['train_val_ratio'] = 0.9
        # brain_mask_path - (string) Path to a binary brain mask file that will be applied to the input DWI volume.
        # Insert None if such mask is not available.
        self.params['brain_mask_path'] = None  # TODO: enter path here
        # wm_mask_path - (string) Path to a binary white matter mask file that will be applied to the input DWI volume.
        # Insert None if such mask is not available.
        self.params['wm_mask_path'] = None  # TODO: enter path here

        """ Tracking (post-training tractography) Parameters """
        # trained_model_dir - (string) Path to the trained model's files (should include a .json and .hdf5 files).
        self.params['trained_model_dir'] = None  # TODO: enter path here
        # tractography_type - (string) 'deterministic' or 'probabilistic'.
        self.params['tractography_type'] = 'deterministic'
        # save_tractogram - (bool) Whether to save the resulting tractogram as a .trk file.
        self.params['save_tractogram'] = True
        # save_dir - (string) Full path for saving the output tractogram.
        self.params['save_dir'] = None  # TODO: enter path here
        # num_seeds - (int) Number of seed points for tractography.
        self.params['num_seeds'] = 100000
        # track_batch_size - (int) Number of streamlines tracked simultaneously.
        self.params['track_batch_size'] = 500
        # step_size - (float) Tractography step size (in voxels).
        self.params['step_size'] = 0.5
        # max_angle - (float) Maximum allowed streamline angle (in degrees).
        self.params['max_angle'] = 60
        # max_length - (float) Maximum allowed streamline length (in mm).
        self.params['max_length'] = 200
        # min_length - (float) Minimum allowed streamline length (in mm).
        self.params['min_length'] = 20
        # entropy_params - (list) [a, b, c] constants used to define the entropy threshold (see paper for more
        # details).
        self.params['entropy_params'] = [3, 10, 4.5]