Commit
Milestone: DANN working for MNIST -> MNISTM
Source (MNIST) accuracy: 99%
Target (MNISTM) accuracy: 93%, with no target labels used
---
Note: without adaptation, MNISTM accuracy is 50%.
Bob Vo committed Feb 24, 2021
1 parent f4ce3ec commit 659a340
Showing 11 changed files with 648 additions and 180 deletions.
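
DANN (domain-adversarial neural network training) hinges on a gradient reversal layer between the shared feature extractor and the domain classifier: it acts as the identity on the forward pass and multiplies the gradient by -lambda on the backward pass, so the features are pushed to fool the domain classifier while the label classifier trains on source labels only. That is what lets the target (MNISTM) accuracy climb from 50% to 93% without any target labels. The repository's own implementation lives in the model and trainer files of this commit that are not expanded on this page; the sketch below is a generic PyTorch version for reference, and the names GradReverse, grad_reverse, and lambd are illustrative, not taken from this commit.

from torch.autograd import Function


class GradReverse(Function):
    """Identity on the forward pass; gradient is scaled by -lambd on the backward pass."""

    @staticmethod
    def forward(ctx, x, lambd):
        ctx.lambd = lambd
        return x.view_as(x)

    @staticmethod
    def backward(ctx, grad_output):
        # Flip the sign of the gradient flowing back into the feature extractor
        # so it learns domain-invariant features; lambd controls the strength.
        return grad_output.neg() * ctx.lambd, None


def grad_reverse(x, lambd=1.0):
    return GradReverse.apply(x, lambd)


# Illustrative use in a DANN-style forward pass:
#   features = feature_extractor(x)
#   class_logits = label_classifier(features)
#   domain_logits = domain_classifier(grad_reverse(features, lambd))
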
271 changes: 142 additions & 129 deletions .gitignore
@@ -1,129 +1,142 @@
# Byte-compiled / optimized / DLL files
__pycache__/
*.py[cod]
*$py.class

# C extensions
*.so

# Distribution / packaging
.Python
build/
develop-eggs/
dist/
downloads/
eggs/
.eggs/
lib/
lib64/
parts/
sdist/
var/
wheels/
pip-wheel-metadata/
share/python-wheels/
*.egg-info/
.installed.cfg
*.egg
MANIFEST

# PyInstaller
# Usually these files are written by a python script from a template
# before PyInstaller builds the exe, so as to inject date/other infos into it.
*.manifest
*.spec

# Installer logs
pip-log.txt
pip-delete-this-directory.txt

# Unit test / coverage reports
htmlcov/
.tox/
.nox/
.coverage
.coverage.*
.cache
nosetests.xml
coverage.xml
*.cover
*.py,cover
.hypothesis/
.pytest_cache/

# Translations
*.mo
*.pot

# Django stuff:
*.log
local_settings.py
db.sqlite3
db.sqlite3-journal

# Flask stuff:
instance/
.webassets-cache

# Scrapy stuff:
.scrapy

# Sphinx documentation
docs/_build/

# PyBuilder
target/

# Jupyter Notebook
.ipynb_checkpoints

# IPython
profile_default/
ipython_config.py

# pyenv
.python-version

# pipenv
# According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control.
# However, in case of collaboration, if having platform-specific dependencies or dependencies
# having no cross-platform support, pipenv may install dependencies that don't work, or not
# install all needed dependencies.
#Pipfile.lock

# PEP 582; used by e.g. github.com/David-OConnor/pyflow
__pypackages__/

# Celery stuff
celerybeat-schedule
celerybeat.pid

# SageMath parsed files
*.sage.py

# Environments
.env
.venv
env/
venv/
ENV/
env.bak/
venv.bak/

# Spyder project settings
.spyderproject
.spyproject

# Rope project settings
.ropeproject

# mkdocs documentation
/site

# mypy
.mypy_cache/
.dmypy.json
dmypy.json

# Pyre type checker
.pyre/

#Torch checkpoint
*.pth
*.pt

#checkpoints and model saving
saved/

#data
data/

#vscode workspace
*.code-workspace
23 changes: 15 additions & 8 deletions base/base_trainer.py
@@ -8,12 +8,14 @@ class BaseTrainer:
     """
     Base class for all trainers
     """
-    def __init__(self, model, criterion, metric_ftns, optimizer, config):
+
+    def __init__(self, model, metric_ftns, optimizer, config):
         self.config = config
-        self.logger = config.get_logger('trainer', config['trainer']['verbosity'])
+        self.logger = config.get_logger(
+            'trainer', config['trainer']['verbosity'])
 
         self.model = model
-        self.criterion = criterion
+        #self.criterion = criterion
         self.metric_ftns = metric_ftns
         self.optimizer = optimizer

@@ -39,8 +41,9 @@ def __init__(self, model, criterion, metric_ftns, optimizer, config):
 
         self.checkpoint_dir = config.save_dir
 
-        # setup visualization writer instance
-        self.writer = TensorboardWriter(config.log_dir, self.logger, cfg_trainer['tensorboard'])
+        # setup visualization writer instance
+        self.writer = TensorboardWriter(
+            config.log_dir, self.logger, cfg_trainer['tensorboard'])
 
         if config.resume is not None:
             self._resume_checkpoint(config.resume)

@@ -52,6 +55,7 @@ def _train_epoch(self, epoch):
 
         :param epoch: Current epoch number
         """
+        # Implement in child method instead
         raise NotImplementedError
 
     def train(self):

@@ -76,7 +80,8 @@ def train(self):
                 try:
                     # check whether model performance improved or not, according to specified metric(mnt_metric)
                     improved = (self.mnt_mode == 'min' and log[self.mnt_metric] <= self.mnt_best) or \
-                               (self.mnt_mode == 'max' and log[self.mnt_metric] >= self.mnt_best)
+                        (self.mnt_mode ==
+                         'max' and log[self.mnt_metric] >= self.mnt_best)
                 except KeyError:
                     self.logger.warning("Warning: Metric '{}' is not found. "
                                         "Model performance monitoring is disabled.".format(self.mnt_metric))

@@ -115,7 +120,8 @@ def _save_checkpoint(self, epoch, save_best=False):
             'monitor_best': self.mnt_best,
             'config': self.config
         }
-        filename = str(self.checkpoint_dir / 'checkpoint-epoch{}.pth'.format(epoch))
+        filename = str(self.checkpoint_dir /
+                       'checkpoint-epoch{}.pth'.format(epoch))
         torch.save(state, filename)
         self.logger.info("Saving checkpoint: {} ...".format(filename))
         if save_best:

@@ -148,4 +154,5 @@ def _resume_checkpoint(self, resume_path):
         else:
             self.optimizer.load_state_dict(checkpoint['optimizer'])
 
-        self.logger.info("Checkpoint loaded. Resume training from epoch {}".format(self.start_epoch))
+        self.logger.info(
+            "Checkpoint loaded. Resume training from epoch {}".format(self.start_epoch))