-
Notifications
You must be signed in to change notification settings - Fork 11
Commit
This commit does not belong to any branch on this repository, and may belong to a fork outside of the repository.
Merge pull request #20 from Degiacomi-Lab/f_fileformats
File format independent data loading
- Loading branch information
Showing
16 changed files
with
3,835 additions
and
299 deletions.
There are no files selected for viewing
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
# GitHub Actions workflow: run the molearn test suite on every push.
# Reconstructed from the new-file diff (@@ -0,0 +1,31 @@); table-render
# artifacts (`| ||`) removed and YAML indentation restored.
name: Tests

on: [push]

jobs:
  build-linux:
    runs-on: ubuntu-latest
    strategy:
      max-parallel: 5

    steps:
      - uses: actions/checkout@v4
      - name: Set up Python 3.8
        uses: actions/setup-python@v3
        with:
          python-version: '3.8'
      - name: Add conda to system path
        run: |
          # $CONDA is an environment variable pointing to the root of the miniconda directory
          echo $CONDA/bin >> $GITHUB_PATH
      - name: Install dependencies
        run: |
          conda install -y python=3.8
          conda env update --file environment.yml --name base
      - name: Test
        run: |
          cd test
          python test_openmm_plugin.py
          python test_pdbdata.py
          python test_trainer.py
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -1,44 +1,50 @@ | ||
# Training script for a FoldingNet autoencoder on MurD trajectory data,
# using molearn's OpenMM physics-aware trainer.
#
# Reconstructed post-merge version of the script: the source span was a
# garbled GitHub diff rendering with old and new lines interleaved and
# `| ||` table artifacts. The added (black-formatted) lines of the
# `@@ -1,44 +1,50 @@` hunk form the file below.
import sys
import os

# Make the sibling `src` directory importable so the in-repo molearn
# package is used rather than any installed copy.
sys.path.insert(0, os.path.join(os.path.abspath(os.pardir), "src"))
from molearn.data import PDBData
from molearn.trainers import OpenMM_Physics_Trainer
from molearn.models.foldingnet import AutoEncoder
import torch


def main():
    ##### Load Data #####
    # Load a clustered DCD trajectory with its matching topology PDB.
    data = PDBData()
    data.import_pdb(
        "./clustered/MurD_open_selection_CLUSTER_aggl_train.dcd",
        "./clustered/MurD_open_selection_NEW_TOPO.pdb",
    )
    data.fix_terminal()
    # Keep only the backbone + CB atoms for training.
    data.atomselect(atoms=["CA", "C", "N", "CB", "O"])

    ##### Prepare Trainer #####
    device = torch.device("cuda" if torch.cuda.is_available() else "cpu")
    trainer = OpenMM_Physics_Trainer(device=device)

    trainer.set_data(data, batch_size=8, validation_split=0.1, manual_seed=25)
    trainer.prepare_physics(remove_NB=True)

    trainer.set_autoencoder(AutoEncoder, out_points=data.dataset.shape[-1])
    trainer.prepare_optimiser()

    ##### Training Loop #####
    # Keep training until loss does not improve for 32 consecutive epochs

    runkwargs = dict(
        log_filename="log_file.dat",
        log_folder="xbb_foldingnet_checkpoints",
        checkpoint_folder="xbb_foldingnet_checkpoints",
    )

    best = 1e24
    while True:
        # Train in 32-epoch increments; stop once a round fails to improve.
        trainer.run(max_epochs=32 + trainer.epoch, **runkwargs)
        if not best > trainer.best:
            break
        best = trainer.best
        print(f"best {trainer.best}, best_filename {trainer.best_name}")


if __name__ == "__main__":
    main()
Oops, something went wrong.