From de1f5aa1fc9a9c0f09101042a6a0fd6272cadc65 Mon Sep 17 00:00:00 2001
From: Maciej Lech <maciej.lech@reef.pl>
Date: Tue, 1 Sep 2020 18:45:36 +0200
Subject: [PATCH 01/15] Add support for bucket to bucket sync

---
 README.md                              |  1 +
 b2sdk/bucket.py                        |  4 +-
 b2sdk/sync/action.py                   | 69 ++++++++++++++++++++++++++
 b2sdk/sync/policy.py                   | 56 ++++++++++++++++++++-
 b2sdk/sync/policy_manager.py           | 22 +++++---
 b2sdk/sync/sync.py                     | 25 ++++++----
 b2sdk/transfer/outbound/copy_source.py | 12 +++--
 test/unit/v0/test_sync.py              |  5 --
 test/unit/v1/test_sync.py              |  7 ---
 9 files changed, 164 insertions(+), 37 deletions(-)

diff --git a/README.md b/README.md
index e0f7bfcf7..a69274797 100644
--- a/README.md
+++ b/README.md
@@ -36,6 +36,7 @@ b2sdk>=0.0.0,<1.0.0
 ## Not released yet
 
 * Drop Python 2 and Python 3.4 support :tada:
+* Support for bucket to bucket sync
 
 ## 1.1.4 (2020-07-15)
 
diff --git a/b2sdk/bucket.py b/b2sdk/bucket.py
index d0facde52..d322dc96d 100644
--- a/b2sdk/bucket.py
+++ b/b2sdk/bucket.py
@@ -687,10 +687,10 @@ def copy(
         """
 
         copy_source = CopySource(file_id, offset=offset, length=length)
-        if length is None:
+        if not length:
             # TODO: it feels like this should be checked on lower level - eg. RawApi
             validate_b2_file_name(new_file_name)
-            return self.api.services.upload_manager.copy_file(
+            return self.api.services.copy_manager.copy_file(
                 copy_source,
                 new_file_name,
                 content_type=content_type,
diff --git a/b2sdk/sync/action.py b/b2sdk/sync/action.py
index 1efc5c5dc..ffc7acdb4 100644
--- a/b2sdk/sync/action.py
+++ b/b2sdk/sync/action.py
@@ -267,6 +267,75 @@ def __str__(self):
         )
 
 
+class B2CopyAction(AbstractAction):
+    """
+    File copying action.
+    """
+
+    def __init__(
+        self, relative_name, b2_file_name, file_id, dest_b2_file_name, mod_time_millis, size
+    ):
+        """
+        :param relative_name: a relative file name
+        :type relative_name: str
+        :param b2_file_name: a name of a remote file
+        :type b2_file_name: str
+        :param file_id: a file ID
+        :type file_id: str
+        :param dest_b2_file_name: a name of a destination remote file
+        :type dest_b2_file_name: str
+        :param mod_time_millis: file modification time in milliseconds
+        :type mod_time_millis: int
+        :param size: a file size
+        :type size: int
+        """
+        self.relative_name = relative_name
+        self.b2_file_name = b2_file_name
+        self.file_id = file_id
+        self.dest_b2_file_name = dest_b2_file_name
+        self.mod_time_millis = mod_time_millis
+        self.size = size
+
+    def get_bytes(self):
+        """
+        Return file size.
+
+        :rtype: int
+        """
+        return self.size
+
+    def do_action(self, bucket, reporter):
+        """
+        Perform the copying action, returning only after the action is completed.
+
+        :param bucket: a Bucket object
+        :type bucket: b2sdk.bucket.Bucket
+        :param reporter: a place to report errors
+        """
+        bucket.copy(
+            self.file_id,
+            self.dest_b2_file_name,
+            length=self.size,
+            progress_listener=SyncFileReporter(reporter)
+        )
+
+    def do_report(self, bucket, reporter):
+        """
+        Report the copying action performed.
+
+        :param bucket: a Bucket object
+        :type bucket: b2sdk.bucket.Bucket
+        :param reporter: a place to report errors
+        """
+        reporter.print_completion('copy ' + self.relative_name)
+
+    def __str__(self):
+        return (
+            'b2_copy(%s, %s, %s, %d)' %
+            (self.b2_file_name, self.file_id, self.dest_b2_file_name, self.mod_time_millis)
+        )
+
+
 class B2DeleteAction(AbstractAction):
     def __init__(self, relative_name, b2_file_name, file_id, note):
         """
diff --git a/b2sdk/sync/policy.py b/b2sdk/sync/policy.py
index 4fd4c5d53..149c678b5 100644
--- a/b2sdk/sync/policy.py
+++ b/b2sdk/sync/policy.py
@@ -14,7 +14,7 @@
 import logging
 
 from ..exception import DestFileNewer
-from .action import LocalDeleteAction, B2DeleteAction, B2DownloadAction, B2HideAction, B2UploadAction
+from .action import LocalDeleteAction, B2CopyAction, B2DeleteAction, B2DownloadAction, B2HideAction, B2UploadAction
 from .exception import InvalidArgument
 
 ONE_DAY_IN_MS = 24 * 60 * 60 * 1000
@@ -305,6 +305,60 @@ class DownAndKeepDaysPolicy(DownPolicy):
     pass
 
 
+class CopyPolicy(AbstractFileSyncPolicy):
+    """
+    File is copied (server-side).
+    """
+    DESTINATION_PREFIX = 'b2://'
+    SOURCE_PREFIX = 'b2://'
+
+    def _make_transfer_action(self):
+        return B2CopyAction(
+            self._source_file.name,
+            self._source_folder.make_full_path(self._source_file.name),
+            self._source_file.latest_version().id_,
+            self._dest_folder.make_full_path(self._source_file.name),
+            self._get_source_mod_time(),
+            self._source_file.latest_version().size,
+        )
+
+
+class CopyAndDeletePolicy(CopyPolicy):
+    """
+    File is copied (server-side) and the delete flag is SET.
+    """
+
+    def _get_hide_delete_actions(self):
+        for action in super()._get_hide_delete_actions():
+            yield action
+        for action in make_b2_delete_actions(
+            self._source_file,
+            self._dest_file,
+            self._dest_folder,
+            self._transferred,
+        ):
+            yield action
+
+
+class CopyAndKeepDaysPolicy(CopyPolicy):
+    """
+    File is copied (server-side) and the keepDays flag is SET.
+    """
+
+    def _get_hide_delete_actions(self):
+        for action in super()._get_hide_delete_actions():
+            yield action
+        for action in make_b2_keep_days_actions(
+            self._source_file,
+            self._dest_file,
+            self._dest_folder,
+            self._transferred,
+            self._keep_days,
+            self._now_millis,
+        ):
+            yield action
+
+
 def make_b2_delete_note(version, index, transferred):
     """
     Create a note message for delete action.
diff --git a/b2sdk/sync/policy_manager.py b/b2sdk/sync/policy_manager.py
index ff82f4a4e..adb2f091c 100644
--- a/b2sdk/sync/policy_manager.py
+++ b/b2sdk/sync/policy_manager.py
@@ -8,8 +8,9 @@
 #
 ######################################################################
 
-from .policy import DownAndDeletePolicy, DownAndKeepDaysPolicy, DownPolicy
-from .policy import UpAndDeletePolicy, UpAndKeepDaysPolicy, UpPolicy
+from .policy import CopyAndDeletePolicy, CopyAndKeepDaysPolicy, CopyPolicy, \
+    DownAndDeletePolicy, DownAndKeepDaysPolicy, DownPolicy, UpAndDeletePolicy, \
+    UpAndKeepDaysPolicy, UpPolicy
 
 
 class SyncPolicyManager(object):
@@ -87,10 +88,19 @@ def get_policy_class(self, sync_type, delete, keep_days):
                 return DownAndKeepDaysPolicy
             else:
                 return DownPolicy
-        assert False, 'invalid sync type: %s, keep_days: %s, delete: %s' % (
-            sync_type,
-            keep_days,
-            delete,
+        elif sync_type == 'b2-to-b2':
+            if delete:
+                return CopyAndDeletePolicy
+            elif keep_days:
+                return CopyAndKeepDaysPolicy
+            else:
+                return CopyPolicy
+        raise NotImplemented(
+            'invalid sync type: %s, keep_days: %s, delete: %s' % (
+                sync_type,
+                keep_days,
+                delete,
+            )
         )
 
 
diff --git a/b2sdk/sync/sync.py b/b2sdk/sync/sync.py
index 7b2b94f4a..2c635ae7e 100644
--- a/b2sdk/sync/sync.py
+++ b/b2sdk/sync/sync.py
@@ -171,11 +171,17 @@ def sync_folders(self, source_folder, dest_folder, now_millis, reporter):
         :param int now_millis: current time in milliseconds
         :param b2sdk.sync.report.SyncReport,None reporter: progress reporter
         """
+        source_type = source_folder.folder_type()
+        dest_type = dest_folder.folder_type()
+
+        if source_type != 'b2' and dest_type != 'b2':
+            raise NotImplemented('Sync between two local folders is not supported!')
+
         # For downloads, make sure that the target directory is there.
-        if dest_folder.folder_type() == 'local' and not self.dry_run:
+        if dest_type == 'local' and not self.dry_run:
             dest_folder.ensure_present()
 
-        if source_folder.folder_type() == 'local' and not self.allow_empty_source:
+        if source_type == 'local' and not self.allow_empty_source:
             source_folder.ensure_non_empty()
 
         # Make an executor to count files and run all of the actions. This is
@@ -192,23 +198,20 @@ def sync_folders(self, source_folder, dest_folder, now_millis, reporter):
         # First, start the thread that counts the local files. That's the operation
         # that should be fastest, and it provides scale for the progress reporting.
         local_folder = None
-        if source_folder.folder_type() == 'local':
+        if source_type == 'local':
             local_folder = source_folder
         if dest_folder.folder_type() == 'local':
             local_folder = dest_folder
-        if local_folder is None:
-            raise ValueError('neither folder is a local folder')
-        if reporter:
+        if reporter and local_folder is not None:
             sync_executor.submit(count_files, local_folder, reporter)
 
         # Schedule each of the actions
         bucket = None
         if source_folder.folder_type() == 'b2':
             bucket = source_folder.bucket
-        if dest_folder.folder_type() == 'b2':
+        if dest_type == 'b2':
             bucket = dest_folder.bucket
-        if bucket is None:
-            raise ValueError('neither folder is a b2 folder')
+
         total_files = 0
         total_bytes = 0
         for action in self.make_folder_sync_actions(
@@ -250,8 +253,8 @@ def make_folder_sync_actions(
         source_type = source_folder.folder_type()
         dest_type = dest_folder.folder_type()
         sync_type = '%s-to-%s' % (source_type, dest_type)
-        if (source_type, dest_type) not in [('b2', 'local'), ('local', 'b2')]:
-            raise NotImplementedError("Sync support only local-to-b2 and b2-to-local")
+        if source_type != 'b2' and dest_type != 'b2':
+            raise NotImplementedError('Sync between two local folders is not supported!')
 
         for source_file, dest_file in zip_folders(
             source_folder,
diff --git a/b2sdk/transfer/outbound/copy_source.py b/b2sdk/transfer/outbound/copy_source.py
index f304324bb..ff5ba19f2 100644
--- a/b2sdk/transfer/outbound/copy_source.py
+++ b/b2sdk/transfer/outbound/copy_source.py
@@ -13,8 +13,8 @@
 
 class CopySource(OutboundTransferSource):
     def __init__(self, file_id, offset=0, length=None):
-        if length is None and offset > 0:
-            raise ValueError('Cannot copy with non zero offset and unknown length')
+        if not length and offset > 0:
+            raise ValueError('Cannot copy with non zero offset and unknown or zero length')
         self.file_id = file_id
         self.length = length
         self.offset = offset
@@ -38,13 +38,15 @@ def is_copy(self):
         return True
 
     def get_bytes_range(self):
-        if self.length is None:
+        if not self.length:
             if self.offset > 0:
                 # auto mode should get file info and create correct copy source (with length)
-                raise ValueError('cannot return bytes range for non zero offset and unknown length')
+                raise ValueError(
+                    'cannot return bytes range for non zero offset and unknown or zero length'
+                )
             return None
 
-        return (self.offset, self.offset + self.length - 1)
+        return self.offset, self.offset + self.length - 1
 
     def get_copy_source_range(self, relative_offset, range_length):
         if self.length is not None and range_length + relative_offset > self.length:
diff --git a/test/unit/v0/test_sync.py b/test/unit/v0/test_sync.py
index b8c5495c2..8ac7bb677 100644
--- a/test/unit/v0/test_sync.py
+++ b/test/unit/v0/test_sync.py
@@ -832,11 +832,6 @@ def test_file_exclusions_inclusions_with_delete(self):
 
 
 class TestMakeSyncActions(TestSync):
-    def test_illegal_b2_to_b2(self):
-        b2_folder = FakeFolder('b2', [])
-        with self.assertRaises(NotImplementedError):
-            list(make_folder_sync_actions(b2_folder, b2_folder, FakeArgs(), 0, self.reporter))
-
     def test_illegal_local_to_local(self):
         local_folder = FakeFolder('local', [])
         with self.assertRaises(NotImplementedError):
diff --git a/test/unit/v1/test_sync.py b/test/unit/v1/test_sync.py
index 5dc847509..b7a8557b7 100644
--- a/test/unit/v1/test_sync.py
+++ b/test/unit/v1/test_sync.py
@@ -862,13 +862,6 @@ class IllegalEnum(Enum):
 
 
 class TestMakeSyncActions(TestSync):
-    def test_illegal_b2_to_b2(self):
-        b2_folder = FakeFolder('b2', [])
-        with self.assertRaises(NotImplementedError):
-            fakeargs = FakeArgs()
-            syncronizer = fakeargs.get_synchronizer()
-            list(syncronizer.make_folder_sync_actions(b2_folder, b2_folder, 0, self.reporter))
-
     def test_illegal_local_to_local(self):
         local_folder = FakeFolder('local', [])
         with self.assertRaises(NotImplementedError):

From 5702cb6fbc033bde4f4e1e14c31f28f9fc2d1bf4 Mon Sep 17 00:00:00 2001
From: Maciej Lech <maciej.lech@reef.pl>
Date: Wed, 9 Sep 2020 13:44:25 +0200
Subject: [PATCH 02/15] Add unit tests

---
 b2sdk/sync/policy_manager.py                  |   2 +-
 b2sdk/sync/sync.py                            |   2 +-
 b2sdk/v0/__init__.py                          |   5 +-
 b2sdk/v1/__init__.py                          |   4 +
 noxfile.py                                    |  15 +-
 .../{sync/deps_exception.py => apiver.py}     |   7 +-
 test/unit/conftest.py                         |  58 ++
 test/unit/sync/fixtures.py                    | 137 ++++
 test/unit/sync/test_base.py                   |  36 -
 test/unit/sync/test_exception.py              |   5 +-
 test/unit/sync/test_sync.py                   | 668 ++++++++++++++++++
 test/unit/v0/test_sync.py                     | 360 ----------
 test/unit/v1/test_sync.py                     | 532 --------------
 13 files changed, 887 insertions(+), 944 deletions(-)
 rename test/unit/{sync/deps_exception.py => apiver.py} (65%)
 create mode 100644 test/unit/conftest.py
 create mode 100644 test/unit/sync/fixtures.py
 delete mode 100644 test/unit/sync/test_base.py
 create mode 100644 test/unit/sync/test_sync.py

diff --git a/b2sdk/sync/policy_manager.py b/b2sdk/sync/policy_manager.py
index adb2f091c..61e43db96 100644
--- a/b2sdk/sync/policy_manager.py
+++ b/b2sdk/sync/policy_manager.py
@@ -95,7 +95,7 @@ def get_policy_class(self, sync_type, delete, keep_days):
                 return CopyAndKeepDaysPolicy
             else:
                 return CopyPolicy
-        raise NotImplemented(
+        raise NotImplementedError(
             'invalid sync type: %s, keep_days: %s, delete: %s' % (
                 sync_type,
                 keep_days,
diff --git a/b2sdk/sync/sync.py b/b2sdk/sync/sync.py
index 2c635ae7e..8f2e86025 100644
--- a/b2sdk/sync/sync.py
+++ b/b2sdk/sync/sync.py
@@ -175,7 +175,7 @@ def sync_folders(self, source_folder, dest_folder, now_millis, reporter):
         dest_type = dest_folder.folder_type()
 
         if source_type != 'b2' and dest_type != 'b2':
-            raise NotImplemented('Sync between two local folders is not supported!')
+            raise NotImplementedError('Sync between two local folders is not supported!')
 
         # For downloads, make sure that the target directory is there.
         if dest_type == 'local' and not self.dry_run:
diff --git a/b2sdk/v0/__init__.py b/b2sdk/v0/__init__.py
index 79d3f2a5a..3bb0dab67 100644
--- a/b2sdk/v0/__init__.py
+++ b/b2sdk/v0/__init__.py
@@ -13,5 +13,6 @@
 from b2sdk.v0.api import B2Api
 from b2sdk.v0.bucket import Bucket
 from b2sdk.v0.bucket import BucketFactory
-from .sync import make_folder_sync_actions
-from .sync import sync_folders
+from b2sdk.v0.sync import Synchronizer
+from b2sdk.v0.sync import make_folder_sync_actions
+from b2sdk.v0.sync import sync_folders
diff --git a/b2sdk/v1/__init__.py b/b2sdk/v1/__init__.py
index 7f1a45138..ee04e47d9 100644
--- a/b2sdk/v1/__init__.py
+++ b/b2sdk/v1/__init__.py
@@ -125,6 +125,7 @@
 # sync
 
 from b2sdk.sync.action import AbstractAction
+from b2sdk.sync.action import B2CopyAction
 from b2sdk.sync.action import B2DeleteAction
 from b2sdk.sync.action import B2DownloadAction
 from b2sdk.sync.action import B2HideAction
@@ -145,6 +146,9 @@
 from b2sdk.sync.policy import DownAndDeletePolicy
 from b2sdk.sync.policy import DownAndKeepDaysPolicy
 from b2sdk.sync.policy import DownPolicy
+from b2sdk.sync.policy import CopyPolicy
+from b2sdk.sync.policy import CopyAndDeletePolicy
+from b2sdk.sync.policy import CopyAndKeepDaysPolicy
 from b2sdk.sync.policy import UpAndDeletePolicy
 from b2sdk.sync.policy import UpAndKeepDaysPolicy
 from b2sdk.sync.policy import UpPolicy
diff --git a/noxfile.py b/noxfile.py
index 8253c3485..37fcdf210 100644
--- a/noxfile.py
+++ b/noxfile.py
@@ -22,8 +22,8 @@
 PY_PATHS = ['b2sdk', 'test', 'noxfile.py', 'setup.py']
 
 REQUIREMENTS_FORMAT = ['yapf==0.27']
-REQUIREMENTS_LINT = ['yapf==0.27', 'pyflakes==2.2.0', 'pytest==5.4.3', 'liccheck==0.4.7']
-REQUIREMENTS_TEST = ['nose==1.3.7', 'pytest==5.4.3', 'pytest-cov==2.10.0']
+REQUIREMENTS_LINT = ['yapf==0.27', 'pyflakes==2.2.0', 'pytest==6.0.1', 'liccheck==0.4.7']
+REQUIREMENTS_TEST = ['nose==1.3.7', 'pytest==6.0.1', 'pytest-cov==2.10.0', 'pytest-mock==3.3.1']
 REQUIREMENTS_BUILD = ['setuptools>=20.2']
 REQUIREMENTS_DOC = [
     'sphinx', 'sphinx-autobuild', 'sphinx_rtd_theme', 'sphinxcontrib-plantuml', 'sadisplay'
@@ -103,11 +103,12 @@ def unit(session):
     """Run unit tests."""
     install_myself(session)
     session.install(*REQUIREMENTS_TEST)
-    session.run(
-        'pytest', '--cov=b2sdk', '--cov-branch', '--cov-report=xml', '--doctest-modules',
-        *session.posargs, 'test/unit'
-    )
-    session.notify('cover')
+    args = ['--cov=b2sdk', '--cov-branch', '--cov-report=xml', '--doctest-modules']
+    session.run('pytest', '--api=v1', *args, *session.posargs, 'test/unit')
+    session.run('pytest', '--api=v0', '--cov-append', *args, *session.posargs, 'test/unit')
+
+    if not session.posargs:
+        session.notify('cover')
 
 
 @nox.session(python=PYTHON_VERSIONS)
diff --git a/test/unit/sync/deps_exception.py b/test/unit/apiver.py
similarity index 65%
rename from test/unit/sync/deps_exception.py
rename to test/unit/apiver.py
index 336ce76f4..465d3bfd3 100644
--- a/test/unit/sync/deps_exception.py
+++ b/test/unit/apiver.py
@@ -1,6 +1,6 @@
 ######################################################################
 #
-# File: test/unit/sync/deps_exception.py
+# File: test/unit/apiver.py
 #
 # Copyright 2020 Backblaze Inc. All Rights Reserved.
 #
@@ -8,4 +8,7 @@
 #
 ######################################################################
 
-from b2sdk.sync.exception import *
+import pytest
+
+# noinspection PyUnresolvedReferences
+apiver, apiver_exception = pytest.get_apiver_modules()
diff --git a/test/unit/conftest.py b/test/unit/conftest.py
new file mode 100644
index 000000000..07f572da7
--- /dev/null
+++ b/test/unit/conftest.py
@@ -0,0 +1,58 @@
+######################################################################
+#
+# File: test/unit/conftest.py
+#
+# Copyright 2020 Backblaze Inc. All Rights Reserved.
+#
+# License https://www.backblaze.com/using_b2_code.html
+#
+######################################################################
+
+import importlib
+from functools import partial
+
+import pytest
+
+pytest.register_assert_rewrite('test.unit')
+
+
+def get_apiver_modules(version):
+    return importlib.import_module('b2sdk.%s' % version), importlib.import_module(
+        'b2sdk.%s.exception' % version
+    )
+
+
+@pytest.hookimpl
+def pytest_addoption(parser):
+    parser.addoption(
+        '--api',
+        default='v1',
+        choices=['v0', 'v1'],
+        help='version of the API',
+    )
+
+
+@pytest.hookimpl
+def pytest_configure(config):
+    pytest.get_apiver_modules = partial(get_apiver_modules, config.getoption('--api'))
+
+
+@pytest.hookimpl
+def pytest_report_header(config):
+    return 'b2sdk apiver: %s' % config.getoption('--api')
+
+
+@pytest.hookimpl(tryfirst=True)
+def pytest_ignore_collect(path, config):
+    path = str(path)
+    ver = config.getoption('--api')
+    if ver == 'v1' and 'v0/' in path:
+        return True
+    if ver == 'v0' and 'v1/' in path:
+        return True
+    return False
+
+
+@pytest.fixture(scope='session')
+def b2sdk_apiver(request):
+    return request.config.getoption('--api')
diff --git a/test/unit/sync/fixtures.py b/test/unit/sync/fixtures.py
new file mode 100644
index 000000000..ac8c072cf
--- /dev/null
+++ b/test/unit/sync/fixtures.py
@@ -0,0 +1,137 @@
+######################################################################
+#
+# File: test/unit/sync/fixtures.py
+#
+# Copyright 2020 Backblaze Inc. All Rights Reserved.
+#
+# License https://www.backblaze.com/using_b2_code.html
+#
+######################################################################
+
+import pytest
+
+from ..apiver import apiver
+
+
+class FakeFolder(apiver.AbstractFolder):
+    def __init__(self, f_type, files=None):
+        if files is None:
+            files = []
+
+        self.f_type = f_type
+        self.files = files
+
+    def all_files(self, reporter, policies_manager=apiver.DEFAULT_SCAN_MANAGER):
+        for single_file in self.files:
+            if single_file.name.endswith('/'):
+                if policies_manager.should_exclude_directory(single_file.name):
+                    continue
+            else:
+                if policies_manager.should_exclude_file(single_file.name):
+                    continue
+            yield single_file
+
+    def folder_type(self):
+        return self.f_type
+
+    def make_full_path(self, name):
+        if self.f_type == 'local':
+            return '/dir/' + name
+        else:
+            return 'folder/' + name
+
+    def __str__(self):
+        return '%s(%s, %s)' % (self.__class__.__name__, self.f_type, self.make_full_path(''))
+
+
+def local_file(name, mod_times, size=10):
+    """
+    Makes a File object for a b2 file, with one FileVersion for
+    each modification time given in mod_times.
+    """
+    versions = [
+        apiver.FileVersion('/dir/%s' % (name,), name, mod_time, 'upload', size)
+        for mod_time in mod_times
+    ]
+    return apiver.File(name, versions)
+
+
+def b2_file(name, mod_times, size=10):
+    """
+    Makes a File object for a b2 file, with one FileVersion for
+    each modification time given in mod_times.
+
+    Positive modification times are uploads, and negative modification
+    times are hides.  It's a hack, but it works.
+
+        b2_file('a.txt', [300, -200, 100])
+
+    Is the same as:
+
+        File(
+            'a.txt',
+            [
+               FileVersion('id_a_300', 'a.txt', 300, 'upload'),
+               FileVersion('id_a_200', 'a.txt', 200, 'hide'),
+               FileVersion('id_a_100', 'a.txt', 100, 'upload')
+            ]
+        )
+    """
+    versions = [
+        apiver.FileVersion(
+            'id_%s_%d' % (name[0], abs(mod_time)),
+            'folder/' + name,
+            abs(mod_time),
+            'upload' if 0 < mod_time else 'hide',
+            size,
+        ) for mod_time in mod_times
+    ]  # yapf: disable
+    return apiver.File(name, versions)
+
+
+@pytest.fixture(scope='module')
+def folder_factory():
+    def get_folder(f_type, *files):
+        def get_files():
+            nonlocal files
+            for file in files:
+                if f_type == 'local':
+                    yield local_file(*file)
+                else:
+                    yield b2_file(*file)
+
+        return FakeFolder(f_type, list(get_files()))
+
+    return get_folder
+
+
+@pytest.fixture(scope='module')
+def synchronizer_factory():
+    def get_synchronizer(
+        policies_manager=apiver.DEFAULT_SCAN_MANAGER,
+        dry_run=False,
+        allow_empty_source=False,
+        newer_file_mode=apiver.NewerFileSyncMode.RAISE_ERROR,
+        keep_days_or_delete=apiver.KeepOrDeleteMode.NO_DELETE,
+        keep_days=None,
+        compare_version_mode=apiver.CompareVersionMode.MODTIME,
+        compare_threshold=None,
+    ):
+        return apiver.Synchronizer(
+            1,
+            policies_manager=policies_manager,
+            dry_run=dry_run,
+            allow_empty_source=allow_empty_source,
+            newer_file_mode=newer_file_mode,
+            keep_days_or_delete=keep_days_or_delete,
+            keep_days=keep_days,
+            compare_version_mode=compare_version_mode,
+            compare_threshold=compare_threshold,
+        )
+
+    return get_synchronizer
+
+
+@pytest.fixture
+def synchronizer(synchronizer_factory):
+    return synchronizer_factory()
diff --git a/test/unit/sync/test_base.py b/test/unit/sync/test_base.py
deleted file mode 100644
index 1ff01fd26..000000000
--- a/test/unit/sync/test_base.py
+++ /dev/null
@@ -1,36 +0,0 @@
-######################################################################
-#
-# File: test/unit/sync/test_base.py
-#
-# Copyright 2020 Backblaze Inc. All Rights Reserved.
-#
-# License https://www.backblaze.com/using_b2_code.html
-#
-######################################################################
-
-from contextlib import contextmanager
-import re
-import unittest
-
-
-class TestBase(unittest.TestCase):
-    @contextmanager
-    def assertRaises(self, exc, msg=None):
-        try:
-            yield
-        except exc as e:
-            if msg is not None:
-                if msg != str(e):
-                    assert False, "expected message '%s', but got '%s'" % (msg, str(e))
-        else:
-            assert False, 'should have thrown %s' % (exc,)
-
-    @contextmanager
-    def assertRaisesRegexp(self, expected_exception, expected_regexp):
-        try:
-            yield
-        except expected_exception as e:
-            if not re.search(expected_regexp, str(e)):
-                assert False, "expected message '%s', but got '%s'" % (expected_regexp, str(e))
-        else:
-            assert False, 'should have thrown %s' % (expected_exception,)
diff --git a/test/unit/sync/test_exception.py b/test/unit/sync/test_exception.py
index ab110c0de..2d9a3d392 100644
--- a/test/unit/sync/test_exception.py
+++ b/test/unit/sync/test_exception.py
@@ -7,9 +7,8 @@
 # License https://www.backblaze.com/using_b2_code.html
 #
 ######################################################################
-from .test_base import TestBase
 
-from .deps_exception import (
+from b2sdk.sync.exception import (
     EnvironmentEncodingError,
     InvalidArgument,
     IncompleteSync,
@@ -17,7 +16,7 @@
 )
 
 
-class TestExceptions(TestBase):
+class TestSyncExceptions:
     def test_environment_encoding_error(self):
         try:
             raise EnvironmentEncodingError('fred', 'george')
diff --git a/test/unit/sync/test_sync.py b/test/unit/sync/test_sync.py
new file mode 100644
index 000000000..34e7acffa
--- /dev/null
+++ b/test/unit/sync/test_sync.py
@@ -0,0 +1,668 @@
+######################################################################
+#
+# File: test/unit/sync/test_sync.py
+#
+# Copyright 2020 Backblaze Inc. All Rights Reserved.
+#
+# License https://www.backblaze.com/using_b2_code.html
+#
+######################################################################
+
+from enum import Enum
+from functools import partial
+
+from .fixtures import *
+from ..apiver import apiver, apiver_exception
+
+DAY = 86400000  # milliseconds
+TODAY = DAY * 100  # an arbitrary reference time for testing
+
+
+class TestSynchronizer:
+    class IllegalEnum(Enum):
+        ILLEGAL = 5100
+
+    @pytest.fixture(autouse=True)
+    def setup(self, folder_factory, mocker):
+        self.folder_factory = folder_factory
+        self.local_folder_factory = partial(folder_factory, 'local')
+        self.b2_folder_factory = partial(folder_factory, 'b2')
+        self.reporter = mocker.MagicMock()
+
+    def assert_folder_sync_actions(self, synchronizer, src_folder, dst_folder, expected_actions):
+        """
+        Checks the actions generated for one file.  The file may or may not
+        exist at the source, and may or may not exist at the destination.
+
+        The source and destination files may have multiple versions.
+        """
+        actions = list(
+            synchronizer.make_folder_sync_actions(
+                src_folder,
+                dst_folder,
+                TODAY,
+                self.reporter,
+            )
+        )
+        assert expected_actions == [str(a) for a in actions]
+
+    @pytest.mark.parametrize(
+        'args', [
+            {
+                'newer_file_mode': IllegalEnum.ILLEGAL
+            },
+            {
+                'keep_days_or_delete': IllegalEnum.ILLEGAL
+            },
+        ],
+        ids=[
+            'newer_file_mode',
+            'keep_days_or_delete',
+        ]
+    )
+    def test_illegal_args(self, synchronizer_factory, b2sdk_apiver, args):
+        exceptions = {
+            'v1': apiver_exception.InvalidArgument,
+            'v0': apiver_exception.CommandError,
+        }
+
+        with pytest.raises(exceptions[b2sdk_apiver]):
+            synchronizer_factory(**args)
+
+    def test_illegal(self, synchronizer):
+        with pytest.raises(NotImplementedError):
+            src = self.local_folder_factory()
+            dst = self.local_folder_factory()
+            self.assert_folder_sync_actions(synchronizer, src, dst, [])
+
+    # src: absent, dst: absent
+
+    @pytest.mark.parametrize(
+        'src_type,dst_type',
+        [
+            ('local', 'b2'),
+            ('b2', 'local'),
+            ('b2', 'b2'),
+        ],
+    )
+    def test_empty(self, synchronizer, src_type, dst_type):
+        src = self.folder_factory(src_type)
+        dst = self.folder_factory(dst_type)
+        self.assert_folder_sync_actions(synchronizer, src, dst, [])
+
+    # # src: present, dst: absent
+
+    @pytest.mark.parametrize(
+        'src_type,dst_type,expected',
+        [
+            ('local', 'b2', ['b2_upload(/dir/a.txt, folder/a.txt, 100)']),
+            ('b2', 'local', ['b2_download(folder/a.txt, id_a_100, /dir/a.txt, 100)']),
+            ('b2', 'b2', ['b2_copy(folder/a.txt, id_a_100, folder/a.txt, 100)']),
+        ],
+    )
+    def test_not_there(self, synchronizer, src_type, dst_type, expected):
+        src = self.folder_factory(src_type, ('a.txt', [100]))
+        dst = self.folder_factory(dst_type)
+        self.assert_folder_sync_actions(synchronizer, src, dst, expected)
+
+    @pytest.mark.parametrize(
+        'src_type,expected',
+        [
+            ('local', ['b2_upload(/dir/directory/a.txt, folder/directory/a.txt, 100)']),
+            ('b2', ['b2_copy(folder/directory/a.txt, id_d_100, folder/directory/a.txt, 100)']),
+        ],
+    )
+    def test_dir_not_there_b2_keepdays(
+        self, synchronizer_factory, src_type, expected
+    ):  # reproduces issue 220
+        src = self.folder_factory(src_type, ('directory/a.txt', [100]))
+        dst = self.b2_folder_factory()
+        synchronizer = synchronizer_factory(
+            keep_days_or_delete=apiver.KeepOrDeleteMode.KEEP_BEFORE_DELETE, keep_days=1
+        )
+        self.assert_folder_sync_actions(synchronizer, src, dst, expected)
+
+    @pytest.mark.parametrize(
+        'src_type,expected',
+        [
+            ('local', ['b2_upload(/dir/directory/a.txt, folder/directory/a.txt, 100)']),
+            ('b2', ['b2_copy(folder/directory/a.txt, id_d_100, folder/directory/a.txt, 100)']),
+        ],
+    )
+    def test_dir_not_there_b2_delete(
+        self, synchronizer_factory, src_type, expected
+    ):  # reproduces issue 218
+        src = self.folder_factory(src_type, ('directory/a.txt', [100]))
+        dst = self.b2_folder_factory()
+        synchronizer = synchronizer_factory(keep_days_or_delete=apiver.KeepOrDeleteMode.DELETE)
+        self.assert_folder_sync_actions(synchronizer, src, dst, expected)
+
+    # # src: absent, dst: present
+
+    @pytest.mark.parametrize(
+        'src_type,dst_type',
+        [
+            ('local', 'b2'),
+            ('b2', 'local'),
+            ('b2', 'b2'),
+        ],
+    )
+    def test_no_delete(self, synchronizer, src_type, dst_type):
+        src = self.folder_factory(src_type)
+        dst = self.folder_factory(dst_type, ('a.txt', [100]))
+        self.assert_folder_sync_actions(synchronizer, src, dst, [])
+
+    @pytest.mark.parametrize(
+        'src_type,dst_type,expected',
+        [
+            ('local', 'b2', ['b2_delete(folder/a.txt, id_a_100, )']),
+            ('b2', 'local', ['local_delete(/dir/a.txt)']),
+            ('b2', 'b2', ['b2_delete(folder/a.txt, id_a_100, )']),
+        ],
+    )
+    def test_delete(self, synchronizer_factory, src_type, dst_type, expected):
+        synchronizer = synchronizer_factory(keep_days_or_delete=apiver.KeepOrDeleteMode.DELETE)
+        src = self.folder_factory(src_type)
+        dst = self.folder_factory(dst_type, ('a.txt', [100]))
+        self.assert_folder_sync_actions(synchronizer, src, dst, expected)
+
+    @pytest.mark.parametrize(
+        'src_type,dst_type,expected',
+        [
+            ('local', 'b2', ['b2_delete(folder/a.txt, id_a_100, )']),
+            ('b2', 'local', ['local_delete(/dir/a.txt)']),
+            ('b2', 'b2', ['b2_delete(folder/a.txt, id_a_100, )']),
+        ],
+    )
+    def test_delete_large(self, synchronizer_factory, src_type, dst_type, expected):
+        synchronizer = synchronizer_factory(keep_days_or_delete=apiver.KeepOrDeleteMode.DELETE)
+        src = self.folder_factory(src_type)
+        dst = self.folder_factory(dst_type, ('a.txt', [100], 10737418240))
+        self.assert_folder_sync_actions(synchronizer, src, dst, expected)
+
+    @pytest.mark.parametrize(
+        'src_type',
+        [
+            'local',
+            'b2',
+        ],
+    )
+    def test_delete_multiple_versions(self, synchronizer_factory, src_type):
+        synchronizer = synchronizer_factory(keep_days_or_delete=apiver.KeepOrDeleteMode.DELETE)
+        src = self.folder_factory(src_type)
+        dst = self.b2_folder_factory(('a.txt', [100, 200]))
+        expected = [
+            'b2_delete(folder/a.txt, id_a_100, )',
+            'b2_delete(folder/a.txt, id_a_200, (old version))'
+        ]
+        self.assert_folder_sync_actions(synchronizer, src, dst, expected)
+
+    @pytest.mark.parametrize(
+        'src_type',
+        [
+            'local',
+            'b2',
+        ],
+    )
+    def test_delete_hide_b2_multiple_versions(self, synchronizer_factory, src_type):
+        synchronizer = synchronizer_factory(
+            keep_days_or_delete=apiver.KeepOrDeleteMode.KEEP_BEFORE_DELETE, keep_days=1
+        )
+        src = self.folder_factory(src_type)
+        dst = self.b2_folder_factory(('a.txt', [TODAY, TODAY - 2 * DAY, TODAY - 4 * DAY]))
+        expected = [
+            'b2_hide(folder/a.txt)', 'b2_delete(folder/a.txt, id_a_8294400000, (old version))'
+        ]
+        self.assert_folder_sync_actions(synchronizer, src, dst, expected)
+
+    @pytest.mark.parametrize(
+        'src_type',
+        [
+            'local',
+            'b2',
+        ],
+    )
+    def test_delete_hide_b2_multiple_versions_old(self, synchronizer_factory, src_type):
+        synchronizer = synchronizer_factory(
+            keep_days_or_delete=apiver.KeepOrDeleteMode.KEEP_BEFORE_DELETE, keep_days=2
+        )
+        src = self.folder_factory(src_type)
+        dst = self.b2_folder_factory(('a.txt', [TODAY - 1 * DAY, TODAY - 3 * DAY, TODAY - 5 * DAY]))
+        expected = [
+            'b2_hide(folder/a.txt)', 'b2_delete(folder/a.txt, id_a_8208000000, (old version))'
+        ]
+        self.assert_folder_sync_actions(synchronizer, src, dst, expected)
+
+    @pytest.mark.parametrize(
+        'src_type',
+        [
+            'local',
+            'b2',
+        ],
+    )
+    def test_already_hidden_multiple_versions_keep(self, synchronizer, src_type):
+        src = self.folder_factory(src_type)
+        dst = self.b2_folder_factory(('a.txt', [-TODAY, TODAY - 2 * DAY, TODAY - 4 * DAY]))
+        self.assert_folder_sync_actions(synchronizer, src, dst, [])
+
+    @pytest.mark.parametrize(
+        'src_type',
+        [
+            'local',
+            'b2',
+        ],
+    )
+    def test_already_hidden_multiple_versions_keep_days(self, synchronizer_factory, src_type):
+        synchronizer = synchronizer_factory(
+            keep_days_or_delete=apiver.KeepOrDeleteMode.KEEP_BEFORE_DELETE, keep_days=1
+        )
+        src = self.folder_factory(src_type)
+        dst = self.b2_folder_factory(('a.txt', [-TODAY, TODAY - 2 * DAY, TODAY - 4 * DAY]))
+        expected = ['b2_delete(folder/a.txt, id_a_8294400000, (old version))']
+        self.assert_folder_sync_actions(synchronizer, src, dst, expected)
+
+    @pytest.mark.parametrize(
+        'src_type',
+        [
+            'local',
+            'b2',
+        ],
+    )
+    def test_already_hidden_multiple_versions_keep_days_one_old(
+        self, synchronizer_factory, src_type
+    ):
+        synchronizer = synchronizer_factory(
+            keep_days_or_delete=apiver.KeepOrDeleteMode.KEEP_BEFORE_DELETE, keep_days=5
+        )
+        src = self.folder_factory(src_type)
+        dst = self.b2_folder_factory(
+            ('a.txt', [-(TODAY - 2 * DAY), TODAY - 4 * DAY, TODAY - 6 * DAY])
+        )
+        self.assert_folder_sync_actions(synchronizer, src, dst, [])
+
+    @pytest.mark.parametrize(
+        'src_type',
+        [
+            'local',
+            'b2',
+        ],
+    )
+    def test_already_hidden_multiple_versions_keep_days_two_old(
+        self, synchronizer_factory, src_type
+    ):
+        synchronizer = synchronizer_factory(
+            keep_days_or_delete=apiver.KeepOrDeleteMode.KEEP_BEFORE_DELETE, keep_days=2
+        )
+        src = self.folder_factory(src_type)
+        dst = self.b2_folder_factory(
+            ('a.txt', [-(TODAY - 2 * DAY), TODAY - 4 * DAY, TODAY - 6 * DAY])
+        )
+        expected = ['b2_delete(folder/a.txt, id_a_8121600000, (old version))']
+        self.assert_folder_sync_actions(synchronizer, src, dst, expected)
+
+    @pytest.mark.parametrize(
+        'src_type',
+        [
+            'local',
+            'b2',
+        ],
+    )
+    def test_already_hidden_multiple_versions_keep_days_delete_hide_marker(
+        self, synchronizer_factory, src_type
+    ):
+        synchronizer = synchronizer_factory(
+            keep_days_or_delete=apiver.KeepOrDeleteMode.KEEP_BEFORE_DELETE, keep_days=1
+        )
+        src = self.folder_factory(src_type)
+        dst = self.b2_folder_factory(
+            ('a.txt', [-(TODAY - 2 * DAY), TODAY - 4 * DAY, TODAY - 6 * DAY])
+        )
+        expected = [
+            'b2_delete(folder/a.txt, id_a_8467200000, (hide marker))',
+            'b2_delete(folder/a.txt, id_a_8294400000, (old version))',
+            'b2_delete(folder/a.txt, id_a_8121600000, (old version))'
+        ]
+        self.assert_folder_sync_actions(synchronizer, src, dst, expected)
+
+    @pytest.mark.parametrize(
+        'src_type',
+        [
+            'local',
+            'b2',
+        ],
+    )
+    def test_already_hidden_multiple_versions_keep_days_old_delete(
+        self, synchronizer_factory, src_type
+    ):
+        synchronizer = synchronizer_factory(
+            keep_days_or_delete=apiver.KeepOrDeleteMode.KEEP_BEFORE_DELETE, keep_days=1
+        )
+        src = self.folder_factory(src_type)
+        dst = self.b2_folder_factory(('a.txt', [-TODAY + 2 * DAY, TODAY - 4 * DAY]))
+        expected = [
+            'b2_delete(folder/a.txt, id_a_8467200000, (hide marker))',
+            'b2_delete(folder/a.txt, id_a_8294400000, (old version))'
+        ]
+        self.assert_folder_sync_actions(synchronizer, src, dst, expected)
+
+    @pytest.mark.parametrize(
+        'src_type',
+        [
+            'local',
+            'b2',
+        ],
+    )
+    def test_already_hidden_multiple_versions_delete(self, synchronizer_factory, src_type):
+        synchronizer = synchronizer_factory(keep_days_or_delete=apiver.KeepOrDeleteMode.DELETE)
+        src = self.folder_factory(src_type)
+        dst = self.b2_folder_factory(('a.txt', [-TODAY, TODAY - 2 * DAY, TODAY - 4 * DAY]))
+        expected = [
+            'b2_delete(folder/a.txt, id_a_8640000000, (hide marker))',
+            'b2_delete(folder/a.txt, id_a_8467200000, (old version))',
+            'b2_delete(folder/a.txt, id_a_8294400000, (old version))'
+        ]
+        self.assert_folder_sync_actions(synchronizer, src, dst, expected)
+
+    # # src same as dst
+
+    @pytest.mark.parametrize(
+        'src_type,dst_type',
+        [
+            ('local', 'b2'),
+            ('b2', 'local'),
+            ('b2', 'b2'),
+        ],
+    )
+    def test_same(self, synchronizer, src_type, dst_type):
+        src = self.folder_factory(src_type, ('a.txt', [100]))
+        dst = self.folder_factory(dst_type, ('a.txt', [100]))
+        self.assert_folder_sync_actions(synchronizer, src, dst, [])
+
+    @pytest.mark.parametrize(
+        'src_type',
+        [
+            'local',
+            'b2',
+        ],
+    )
+    def test_same_leave_old_version(self, synchronizer, src_type):
+        src = self.folder_factory(src_type, ('a.txt', [TODAY]))
+        dst = self.b2_folder_factory(('a.txt', [TODAY, TODAY - 3 * DAY]))
+        self.assert_folder_sync_actions(synchronizer, src, dst, [])
+
+    @pytest.mark.parametrize(
+        'src_type',
+        [
+            'local',
+            'b2',
+        ],
+    )
+    def test_same_clean_old_version(self, synchronizer_factory, src_type):
+        synchronizer = synchronizer_factory(
+            keep_days_or_delete=apiver.KeepOrDeleteMode.KEEP_BEFORE_DELETE, keep_days=1
+        )
+        src = self.folder_factory(src_type, ('a.txt', [TODAY - 3 * DAY]))
+        dst = self.b2_folder_factory(('a.txt', [TODAY - 3 * DAY, TODAY - 4 * DAY]))
+        expected = ['b2_delete(folder/a.txt, id_a_8294400000, (old version))']
+        self.assert_folder_sync_actions(synchronizer, src, dst, expected)
+
+    @pytest.mark.parametrize(
+        'src_type',
+        [
+            'local',
+            'b2',
+        ],
+    )
+    def test_keep_days_no_change_with_old_file(self, synchronizer_factory, src_type):
+        synchronizer = synchronizer_factory(
+            keep_days_or_delete=apiver.KeepOrDeleteMode.KEEP_BEFORE_DELETE, keep_days=1
+        )
+        src = self.folder_factory(src_type, ('a.txt', [TODAY - 3 * DAY]))
+        dst = self.b2_folder_factory(('a.txt', [TODAY - 3 * DAY]))
+        self.assert_folder_sync_actions(synchronizer, src, dst, [])
+
+    @pytest.mark.parametrize(
+        'src_type',
+        [
+            'local',
+            'b2',
+        ],
+    )
+    def test_same_delete_old_versions(self, synchronizer_factory, src_type):
+        synchronizer = synchronizer_factory(keep_days_or_delete=apiver.KeepOrDeleteMode.DELETE)
+        src = self.folder_factory(src_type, ('a.txt', [TODAY]))
+        dst = self.b2_folder_factory(('a.txt', [TODAY, TODAY - 3 * DAY]))
+        expected = ['b2_delete(folder/a.txt, id_a_8380800000, (old version))']
+        self.assert_folder_sync_actions(synchronizer, src, dst, expected)
+
+    # # src newer than dst
+
+    @pytest.mark.parametrize(
+        'src_type,dst_type,expected',
+        [
+            ('local', 'b2', ['b2_upload(/dir/a.txt, folder/a.txt, 200)']),
+            ('b2', 'local', ['b2_download(folder/a.txt, id_a_200, /dir/a.txt, 200)']),
+            ('b2', 'b2', ['b2_copy(folder/a.txt, id_a_200, folder/a.txt, 200)']),
+        ],
+    )
+    def test_never(self, synchronizer, src_type, dst_type, expected):
+        src = self.folder_factory(src_type, ('a.txt', [200]))
+        dst = self.folder_factory(dst_type, ('a.txt', [100]))
+        self.assert_folder_sync_actions(synchronizer, src, dst, expected)
+
+    @pytest.mark.parametrize(
+        'src_type,expected',
+        [
+            (
+                'local', [
+                    'b2_upload(/dir/a.txt, folder/a.txt, 8640000000)',
+                    'b2_delete(folder/a.txt, id_a_8208000000, (old version))',
+                ]
+            ),
+            (
+                'b2', [
+                    'b2_copy(folder/a.txt, id_a_8640000000, folder/a.txt, 8640000000)',
+                    'b2_delete(folder/a.txt, id_a_8208000000, (old version))',
+                ]
+            ),
+        ],
+    )
+    def test_newer_clean_old_versions(self, synchronizer_factory, src_type, expected):
+        synchronizer = synchronizer_factory(
+            keep_days_or_delete=apiver.KeepOrDeleteMode.KEEP_BEFORE_DELETE, keep_days=2
+        )
+        src = self.folder_factory(src_type, ('a.txt', [TODAY]))
+        dst = self.b2_folder_factory(('a.txt', [TODAY - 1 * DAY, TODAY - 3 * DAY, TODAY - 5 * DAY]))
+        self.assert_folder_sync_actions(synchronizer, src, dst, expected)
+
+    @pytest.mark.parametrize(
+        'src_type,expected',
+        [
+            (
+                'local', [
+                    'b2_upload(/dir/a.txt, folder/a.txt, 8640000000)',
+                    'b2_delete(folder/a.txt, id_a_8553600000, (old version))',
+                    'b2_delete(folder/a.txt, id_a_8380800000, (old version))',
+                ]
+            ),
+            (
+                'b2', [
+                    'b2_copy(folder/a.txt, id_a_8640000000, folder/a.txt, 8640000000)',
+                    'b2_delete(folder/a.txt, id_a_8553600000, (old version))',
+                    'b2_delete(folder/a.txt, id_a_8380800000, (old version))',
+                ]
+            ),
+        ],
+    )
+    def test_newer_delete_old_versions(self, synchronizer_factory, src_type, expected):
+        synchronizer = synchronizer_factory(keep_days_or_delete=apiver.KeepOrDeleteMode.DELETE)
+        src = self.folder_factory(src_type, ('a.txt', [TODAY]))
+        dst = self.b2_folder_factory(('a.txt', [TODAY - 1 * DAY, TODAY - 3 * DAY]))
+        self.assert_folder_sync_actions(synchronizer, src, dst, expected)
+
+    # # src older than dst
+
+    @pytest.mark.parametrize(
+        'src_type,dst_type,expected',
+        [
+            ('local', 'b2', ['b2_upload(/dir/a.txt, folder/a.txt, 200)']),
+            ('b2', 'local', ['b2_download(folder/a.txt, id_a_200, /dir/a.txt, 200)']),
+            ('b2', 'b2', ['b2_copy(folder/a.txt, id_a_200, folder/a.txt, 200)']),
+        ],
+    )
+    def test_older(self, synchronizer, b2sdk_apiver, src_type, dst_type, expected):
+        src = self.folder_factory(src_type, ('a.txt', [100]))
+        dst = self.folder_factory(dst_type, ('a.txt', [200]))
+        with pytest.raises(apiver_exception.DestFileNewer) as excinfo:
+            self.assert_folder_sync_actions(synchronizer, src, dst, expected)
+        messages = {
+            'v1': 'source file is older than destination: %s://a.txt with a time of 100 '
+                  'cannot be synced to %s://a.txt with a time of 200, '
+                  'unless a valid newer_file_mode is provided',
+            'v0': 'source file is older than destination: %s://a.txt with a time of 100 '
+                  'cannot be synced to %s://a.txt with a time of 200, '
+                  'unless --skipNewer or --replaceNewer is provided',
+        }  # yapf: disable
+
+        assert str(excinfo.value) == messages[b2sdk_apiver] % (src_type, dst_type)
+
+    @pytest.mark.parametrize(
+        'src_type,dst_type',
+        [
+            ('local', 'b2'),
+            ('b2', 'local'),
+            ('b2', 'b2'),
+        ],
+    )
+    def test_older_skip(self, synchronizer_factory, src_type, dst_type):
+        synchronizer = synchronizer_factory(newer_file_mode=apiver.NewerFileSyncMode.SKIP)
+        src = self.folder_factory(src_type, ('a.txt', [100]))
+        dst = self.folder_factory(dst_type, ('a.txt', [200]))
+        self.assert_folder_sync_actions(synchronizer, src, dst, [])
+
+    @pytest.mark.parametrize(
+        'src_type,dst_type,expected',
+        [
+            ('local', 'b2', ['b2_upload(/dir/a.txt, folder/a.txt, 100)']),
+            ('b2', 'local', ['b2_download(folder/a.txt, id_a_100, /dir/a.txt, 100)']),
+            ('b2', 'b2', ['b2_copy(folder/a.txt, id_a_100, folder/a.txt, 100)']),
+        ],
+    )
+    def test_older_replace(self, synchronizer_factory, src_type, dst_type, expected):
+        synchronizer = synchronizer_factory(newer_file_mode=apiver.NewerFileSyncMode.REPLACE)
+        src = self.folder_factory(src_type, ('a.txt', [100]))
+        dst = self.folder_factory(dst_type, ('a.txt', [200]))
+        self.assert_folder_sync_actions(synchronizer, src, dst, expected)
+
+    @pytest.mark.parametrize(
+        'src_type,expected',
+        [
+            (
+                'local', [
+                    'b2_upload(/dir/a.txt, folder/a.txt, 100)',
+                    'b2_delete(folder/a.txt, id_a_200, (old version))',
+                ]
+            ),
+            (
+                'b2', [
+                    'b2_copy(folder/a.txt, id_a_100, folder/a.txt, 100)',
+                    'b2_delete(folder/a.txt, id_a_200, (old version))',
+                ]
+            ),
+        ],
+    )
+    def test_older_replace_delete(self, synchronizer_factory, src_type, expected):
+        synchronizer = synchronizer_factory(
+            newer_file_mode=apiver.NewerFileSyncMode.REPLACE,
+            keep_days_or_delete=apiver.KeepOrDeleteMode.DELETE
+        )
+        src = self.folder_factory(src_type, ('a.txt', [100]))
+        dst = self.b2_folder_factory(('a.txt', [200]))
+        self.assert_folder_sync_actions(synchronizer, src, dst, expected)
+
+    # # compareVersions option
+
+    @pytest.mark.parametrize(
+        'src_type,dst_type',
+        [
+            ('local', 'b2'),
+            ('b2', 'local'),
+            ('b2', 'b2'),
+        ],
+    )
+    def test_compare_none_newer(self, synchronizer_factory, src_type, dst_type):
+        synchronizer = synchronizer_factory(compare_version_mode=apiver.CompareVersionMode.NONE)
+        src = self.folder_factory(src_type, ('a.txt', [200]))
+        dst = self.folder_factory(dst_type, ('a.txt', [100]))
+        self.assert_folder_sync_actions(synchronizer, src, dst, [])
+
+    @pytest.mark.parametrize(
+        'src_type,dst_type',
+        [
+            ('local', 'b2'),
+            ('b2', 'local'),
+            ('b2', 'b2'),
+        ],
+    )
+    def test_compare_none_older(self, synchronizer_factory, src_type, dst_type):
+        synchronizer = synchronizer_factory(compare_version_mode=apiver.CompareVersionMode.NONE)
+        src = self.folder_factory(src_type, ('a.txt', [100]))
+        dst = self.folder_factory(dst_type, ('a.txt', [200]))
+        self.assert_folder_sync_actions(synchronizer, src, dst, [])
+
+    @pytest.mark.parametrize(
+        'src_type,dst_type',
+        [
+            ('local', 'b2'),
+            ('b2', 'local'),
+            ('b2', 'b2'),
+        ],
+    )
+    def test_compare_size_equal(self, synchronizer_factory, src_type, dst_type):
+        synchronizer = synchronizer_factory(compare_version_mode=apiver.CompareVersionMode.SIZE)
+        src = self.folder_factory(src_type, ('a.txt', [200], 10))
+        dst = self.folder_factory(dst_type, ('a.txt', [100], 10))
+        self.assert_folder_sync_actions(synchronizer, src, dst, [])
+
+    @pytest.mark.parametrize(
+        'src_type,dst_type,expected',
+        [
+            ('local', 'b2', ['b2_upload(/dir/a.txt, folder/a.txt, 200)']),
+            ('b2', 'local', ['b2_download(folder/a.txt, id_a_200, /dir/a.txt, 200)']),
+            ('b2', 'b2', ['b2_copy(folder/a.txt, id_a_200, folder/a.txt, 200)']),
+        ],
+    )
+    def test_compare_size_not_equal(self, synchronizer_factory, src_type, dst_type, expected):
+        synchronizer = synchronizer_factory(compare_version_mode=apiver.CompareVersionMode.SIZE)
+        src = self.folder_factory(src_type, ('a.txt', [200], 11))
+        dst = self.folder_factory(dst_type, ('a.txt', [100], 10))
+        self.assert_folder_sync_actions(synchronizer, src, dst, expected)
+
+    @pytest.mark.parametrize(
+        'src_type,dst_type,expected',
+        [
+            (
+                'local', 'b2', [
+                    'b2_upload(/dir/a.txt, folder/a.txt, 200)',
+                    'b2_delete(folder/a.txt, id_a_100, (old version))'
+                ]
+            ),
+            ('b2', 'local', ['b2_download(folder/a.txt, id_a_200, /dir/a.txt, 200)']),
+            (
+                'b2', 'b2', [
+                    'b2_copy(folder/a.txt, id_a_200, folder/a.txt, 200)',
+                    'b2_delete(folder/a.txt, id_a_100, (old version))'
+                ]
+            ),
+        ],
+    )
+    def test_compare_size_not_equal_delete(
+        self, synchronizer_factory, src_type, dst_type, expected
+    ):
+        synchronizer = synchronizer_factory(
+            compare_version_mode=apiver.CompareVersionMode.SIZE,
+            keep_days_or_delete=apiver.KeepOrDeleteMode.DELETE
+        )
+        src = self.folder_factory(src_type, ('a.txt', [200], 11))
+        dst = self.folder_factory(dst_type, ('a.txt', [100], 10))
+        self.assert_folder_sync_actions(synchronizer, src, dst, expected)
diff --git a/test/unit/v0/test_sync.py b/test/unit/v0/test_sync.py
index 8ac7bb677..30732df98 100644
--- a/test/unit/v0/test_sync.py
+++ b/test/unit/v0/test_sync.py
@@ -20,7 +20,6 @@
 
 from .test_base import TestBase
 
-from .deps_exception import CommandError, DestFileNewer
 from .deps_exception import UnSyncableFilename
 from .deps import FileVersionInfo
 from .deps import AbstractFolder, B2Folder, LocalFolder
@@ -733,39 +732,6 @@ def __init__(
         self.excludeAllSymlinks = excludeAllSymlinks
 
 
-def b2_file(name, mod_times, size=10):
-    """
-    Makes a File object for a b2 file, with one FileVersion for
-    each modification time given in mod_times.
-
-    Positive modification times are uploads, and negative modification
-    times are hides.  It's a hack, but it works.
-
-        b2_file('a.txt', [300, -200, 100])
-
-    Is the same as:
-
-        File(
-            'a.txt',
-            [
-               FileVersion('id_a_300', 'a.txt', 300, 'upload'),
-               FileVersion('id_a_200', 'a.txt', 200, 'hide'),
-               FileVersion('id_a_100', 'a.txt', 100, 'upload')
-            ]
-        )
-    """
-    versions = [
-        FileVersion(
-            'id_%s_%d' % (name[0], abs(mod_time)),
-            'folder/' + name,
-            abs(mod_time),
-            'upload' if 0 < mod_time else 'hide',
-            size,
-        ) for mod_time in mod_times
-    ]  # yapf disable
-    return File(name, versions)
-
-
 def local_file(name, mod_times, size=10):
     """
     Makes a File object for a b2 file, with one FileVersion for
@@ -831,332 +797,6 @@ def test_file_exclusions_inclusions_with_delete(self):
         self._check_folder_sync(expected_actions, fakeargs)
 
 
-class TestMakeSyncActions(TestSync):
-    def test_illegal_local_to_local(self):
-        local_folder = FakeFolder('local', [])
-        with self.assertRaises(NotImplementedError):
-            list(make_folder_sync_actions(local_folder, local_folder, FakeArgs(), 0, self.reporter))
-
-    def test_illegal_skip_and_replace(self):
-        with self.assertRaises(CommandError):
-            self._check_local_to_b2(None, None, FakeArgs(skipNewer=True, replaceNewer=True), [])
-
-    def test_illegal_delete_and_keep_days(self):
-        with self.assertRaises(CommandError):
-            self._check_local_to_b2(None, None, FakeArgs(delete=True, keepDays=1), [])
-
-    # src: absent, dst: absent
-
-    def test_empty_b2(self):
-        self._check_local_to_b2(None, None, FakeArgs(), [])
-
-    def test_empty_local(self):
-        self._check_b2_to_local(None, None, FakeArgs(), [])
-
-    # src: present, dst: absent
-
-    def test_not_there_b2(self):
-        src_file = local_file('a.txt', [100])
-        self._check_local_to_b2(
-            src_file, None, FakeArgs(), ['b2_upload(/dir/a.txt, folder/a.txt, 100)']
-        )
-
-    def test_dir_not_there_b2_keepdays(self):  # reproduces issue 220
-        src_file = b2_file('directory/a.txt', [100])
-        actions = ['b2_upload(/dir/directory/a.txt, folder/directory/a.txt, 100)']
-        self._check_local_to_b2(src_file, None, FakeArgs(keepDays=1), actions)
-
-    def test_dir_not_there_b2_delete(self):  # reproduces issue 218
-        src_file = b2_file('directory/a.txt', [100])
-        actions = ['b2_upload(/dir/directory/a.txt, folder/directory/a.txt, 100)']
-        self._check_local_to_b2(src_file, None, FakeArgs(delete=True), actions)
-
-    def test_not_there_local(self):
-        src_file = b2_file('a.txt', [100])
-        actions = ['b2_download(folder/a.txt, id_a_100, /dir/a.txt, 100)']
-        self._check_b2_to_local(src_file, None, FakeArgs(), actions)
-
-    # src: absent, dst: present
-
-    def test_no_delete_b2(self):
-        dst_file = b2_file('a.txt', [100])
-        self._check_local_to_b2(None, dst_file, FakeArgs(), [])
-
-    def test_no_delete_local(self):
-        dst_file = local_file('a.txt', [100])
-        self._check_b2_to_local(None, dst_file, FakeArgs(), [])
-
-    def test_delete_b2(self):
-        dst_file = b2_file('a.txt', [100])
-        actions = ['b2_delete(folder/a.txt, id_a_100, )']
-        self._check_local_to_b2(None, dst_file, FakeArgs(delete=True), actions)
-
-    def test_delete_large_b2(self):
-        dst_file = b2_file('a.txt', [100])
-        actions = ['b2_delete(folder/a.txt, id_a_100, )']
-        self._check_local_to_b2(None, dst_file, FakeArgs(delete=True), actions)
-
-    def test_delete_b2_multiple_versions(self):
-        dst_file = b2_file('a.txt', [100, 200])
-        actions = [
-            'b2_delete(folder/a.txt, id_a_100, )',
-            'b2_delete(folder/a.txt, id_a_200, (old version))'
-        ]
-        self._check_local_to_b2(None, dst_file, FakeArgs(delete=True), actions)
-
-    def test_delete_hide_b2_multiple_versions(self):
-        dst_file = b2_file('a.txt', [TODAY, TODAY - 2 * DAY, TODAY - 4 * DAY])
-        actions = [
-            'b2_hide(folder/a.txt)', 'b2_delete(folder/a.txt, id_a_8294400000, (old version))'
-        ]
-        self._check_local_to_b2(None, dst_file, FakeArgs(keepDays=1), actions)
-
-    def test_delete_hide_b2_multiple_versions_old(self):
-        dst_file = b2_file('a.txt', [TODAY - 1 * DAY, TODAY - 3 * DAY, TODAY - 5 * DAY])
-        actions = [
-            'b2_hide(folder/a.txt)', 'b2_delete(folder/a.txt, id_a_8208000000, (old version))'
-        ]
-        self._check_local_to_b2(None, dst_file, FakeArgs(keepDays=2), actions)
-
-    def test_already_hidden_multiple_versions_keep(self):
-        dst_file = b2_file('a.txt', [-TODAY, TODAY - 2 * DAY, TODAY - 4 * DAY])
-        self._check_local_to_b2(None, dst_file, FakeArgs(), [])
-
-    def test_already_hidden_multiple_versions_keep_days(self):
-        dst_file = b2_file('a.txt', [-TODAY, TODAY - 2 * DAY, TODAY - 4 * DAY])
-        actions = ['b2_delete(folder/a.txt, id_a_8294400000, (old version))']
-        self._check_local_to_b2(None, dst_file, FakeArgs(keepDays=1), actions)
-
-    def test_already_hidden_multiple_versions_keep_days_one_old(self):
-        # The 6-day-old file should be preserved, because it was visible
-        # 5 days ago.
-        dst_file = b2_file('a.txt', [-(TODAY - 2 * DAY), TODAY - 4 * DAY, TODAY - 6 * DAY])
-        actions = []
-        self._check_local_to_b2(None, dst_file, FakeArgs(keepDays=5), actions)
-
-    def test_already_hidden_multiple_versions_keep_days_two_old(self):
-        dst_file = b2_file('a.txt', [-(TODAY - 2 * DAY), TODAY - 4 * DAY, TODAY - 6 * DAY])
-        actions = ['b2_delete(folder/a.txt, id_a_8121600000, (old version))']
-        self._check_local_to_b2(None, dst_file, FakeArgs(keepDays=2), actions)
-
-    def test_already_hidden_multiple_versions_keep_days_delete_hide_marker(self):
-        dst_file = b2_file('a.txt', [-(TODAY - 2 * DAY), TODAY - 4 * DAY, TODAY - 6 * DAY])
-        actions = [
-            'b2_delete(folder/a.txt, id_a_8467200000, (hide marker))',
-            'b2_delete(folder/a.txt, id_a_8294400000, (old version))',
-            'b2_delete(folder/a.txt, id_a_8121600000, (old version))'
-        ]
-        self._check_local_to_b2(None, dst_file, FakeArgs(keepDays=1), actions)
-
-    def test_already_hidden_multiple_versions_keep_days_old_delete(self):
-        dst_file = b2_file('a.txt', [-TODAY + 2 * DAY, TODAY - 4 * DAY])
-        actions = [
-            'b2_delete(folder/a.txt, id_a_8467200000, (hide marker))',
-            'b2_delete(folder/a.txt, id_a_8294400000, (old version))'
-        ]
-        self._check_local_to_b2(None, dst_file, FakeArgs(keepDays=1), actions)
-
-    def test_already_hidden_multiple_versions_delete(self):
-        dst_file = b2_file('a.txt', [-TODAY, TODAY - 2 * DAY, TODAY - 4 * DAY])
-        actions = [
-            'b2_delete(folder/a.txt, id_a_8640000000, (hide marker))',
-            'b2_delete(folder/a.txt, id_a_8467200000, (old version))',
-            'b2_delete(folder/a.txt, id_a_8294400000, (old version))'
-        ]
-        self._check_local_to_b2(None, dst_file, FakeArgs(delete=True), actions)
-
-    def test_delete_local(self):
-        dst_file = local_file('a.txt', [100])
-        self._check_b2_to_local(None, dst_file, FakeArgs(delete=True), ['local_delete(/dir/a.txt)'])
-
-    # src same as dst
-
-    def test_same_b2(self):
-        src_file = local_file('a.txt', [100])
-        dst_file = b2_file('a.txt', [100])
-        self._check_local_to_b2(src_file, dst_file, FakeArgs(), [])
-
-    def test_same_local(self):
-        src_file = b2_file('a.txt', [100])
-        dst_file = local_file('a.txt', [100])
-        self._check_b2_to_local(src_file, dst_file, FakeArgs(), [])
-
-    def test_same_leave_old_versions(self):
-        src_file = local_file('a.txt', [TODAY])
-        dst_file = b2_file('a.txt', [TODAY, TODAY - 3 * DAY])
-        self._check_local_to_b2(src_file, dst_file, FakeArgs(), [])
-
-    def test_same_clean_old_versions(self):
-        src_file = local_file('a.txt', [TODAY - 3 * DAY])
-        dst_file = b2_file('a.txt', [TODAY - 3 * DAY, TODAY - 4 * DAY])
-        actions = ['b2_delete(folder/a.txt, id_a_8294400000, (old version))']
-        self._check_local_to_b2(src_file, dst_file, FakeArgs(keepDays=1), actions)
-
-    def test_keep_days_no_change_with_old_file(self):
-        src_file = local_file('a.txt', [TODAY - 3 * DAY])
-        dst_file = b2_file('a.txt', [TODAY - 3 * DAY])
-        self._check_local_to_b2(src_file, dst_file, FakeArgs(keepDays=1), [])
-
-    def test_same_delete_old_versions(self):
-        src_file = local_file('a.txt', [TODAY])
-        dst_file = b2_file('a.txt', [TODAY, TODAY - 3 * DAY])
-        actions = ['b2_delete(folder/a.txt, id_a_8380800000, (old version))']
-        self._check_local_to_b2(src_file, dst_file, FakeArgs(delete=True), actions)
-
-    # src newer than dst
-
-    def test_newer_b2(self):
-        src_file = local_file('a.txt', [200])
-        dst_file = b2_file('a.txt', [100])
-        actions = ['b2_upload(/dir/a.txt, folder/a.txt, 200)']
-        self._check_local_to_b2(src_file, dst_file, FakeArgs(), actions)
-
-    def test_newer_b2_clean_old_versions(self):
-        src_file = local_file('a.txt', [TODAY])
-        dst_file = b2_file('a.txt', [TODAY - 1 * DAY, TODAY - 3 * DAY, TODAY - 5 * DAY])
-        actions = [
-            'b2_upload(/dir/a.txt, folder/a.txt, 8640000000)',
-            'b2_delete(folder/a.txt, id_a_8208000000, (old version))'
-        ]
-        self._check_local_to_b2(src_file, dst_file, FakeArgs(keepDays=2), actions)
-
-    def test_newer_b2_delete_old_versions(self):
-        src_file = local_file('a.txt', [TODAY])
-        dst_file = b2_file('a.txt', [TODAY - 1 * DAY, TODAY - 3 * DAY])
-        actions = [
-            'b2_upload(/dir/a.txt, folder/a.txt, 8640000000)',
-            'b2_delete(folder/a.txt, id_a_8553600000, (old version))',
-            'b2_delete(folder/a.txt, id_a_8380800000, (old version))'
-        ]  # yapf disable
-        self._check_local_to_b2(src_file, dst_file, FakeArgs(delete=True), actions)
-
-    def test_newer_local(self):
-        src_file = b2_file('a.txt', [200])
-        dst_file = local_file('a.txt', [100])
-        actions = ['b2_download(folder/a.txt, id_a_200, /dir/a.txt, 200)']
-        self._check_b2_to_local(src_file, dst_file, FakeArgs(delete=True), actions)
-
-    # src older than dst
-
-    def test_older_b2(self):
-        src_file = local_file('a.txt', [100])
-        dst_file = b2_file('a.txt', [200])
-        try:
-            self._check_local_to_b2(src_file, dst_file, FakeArgs(), [])
-            self.fail('should have raised DestFileNewer')
-        except DestFileNewer as e:
-            self.assertEqual(
-                'source file is older than destination: local://a.txt with a time of 100 cannot be synced to b2://a.txt with a time of 200, unless --skipNewer or --replaceNewer is provided',
-                str(e)
-            )
-
-    def test_older_b2_skip(self):
-        src_file = local_file('a.txt', [100])
-        dst_file = b2_file('a.txt', [200])
-        self._check_local_to_b2(src_file, dst_file, FakeArgs(skipNewer=True), [])
-
-    def test_older_b2_replace(self):
-        src_file = local_file('a.txt', [100])
-        dst_file = b2_file('a.txt', [200])
-        actions = ['b2_upload(/dir/a.txt, folder/a.txt, 100)']
-        self._check_local_to_b2(src_file, dst_file, FakeArgs(replaceNewer=True), actions)
-
-    def test_older_b2_replace_delete(self):
-        src_file = local_file('a.txt', [100])
-        dst_file = b2_file('a.txt', [200])
-        args = FakeArgs(replaceNewer=True, delete=True)
-        actions = [
-            'b2_upload(/dir/a.txt, folder/a.txt, 100)',
-            'b2_delete(folder/a.txt, id_a_200, (old version))'
-        ]
-        self._check_local_to_b2(src_file, dst_file, args, actions)
-
-    def test_older_local(self):
-        src_file = b2_file('directory/a.txt', [100])
-        dst_file = local_file('directory/a.txt', [200])
-        try:
-            self._check_b2_to_local(src_file, dst_file, FakeArgs(), [])
-            self.fail('should have raised DestFileNewer')
-        except DestFileNewer as e:
-            self.assertEqual(
-                'source file is older than destination: b2://directory/a.txt with a time of 100 cannot be synced to local://directory/a.txt with a time of 200, unless --skipNewer or --replaceNewer is provided',
-                str(e)
-            )
-
-    def test_older_local_skip(self):
-        src_file = b2_file('a.txt', [100])
-        dst_file = local_file('a.txt', [200])
-        self._check_b2_to_local(src_file, dst_file, FakeArgs(skipNewer=True), [])
-
-    def test_older_local_replace(self):
-        src_file = b2_file('a.txt', [100])
-        dst_file = local_file('a.txt', [200])
-        actions = ['b2_download(folder/a.txt, id_a_100, /dir/a.txt, 100)']
-        self._check_b2_to_local(src_file, dst_file, FakeArgs(replaceNewer=True), actions)
-
-    # compareVersions option
-
-    def test_compare_b2_none_newer(self):
-        src_file = local_file('a.txt', [200])
-        dst_file = b2_file('a.txt', [100])
-        self._check_local_to_b2(src_file, dst_file, FakeArgs(compareVersions='none'), [])
-
-    def test_compare_b2_none_older(self):
-        src_file = local_file('a.txt', [100])
-        dst_file = b2_file('a.txt', [200])
-        self._check_local_to_b2(src_file, dst_file, FakeArgs(compareVersions='none'), [])
-
-    def test_compare_b2_size_equal(self):
-        src_file = local_file('a.txt', [200], size=10)
-        dst_file = b2_file('a.txt', [100], size=10)
-        self._check_local_to_b2(src_file, dst_file, FakeArgs(compareVersions='size'), [])
-
-    def test_compare_b2_size_not_equal(self):
-        src_file = local_file('a.txt', [200], size=11)
-        dst_file = b2_file('a.txt', [100], size=10)
-        actions = ['b2_upload(/dir/a.txt, folder/a.txt, 200)']
-        self._check_local_to_b2(src_file, dst_file, FakeArgs(compareVersions='size'), actions)
-
-    def test_compare_b2_size_not_equal_delete(self):
-        src_file = local_file('a.txt', [200], size=11)
-        dst_file = b2_file('a.txt', [100], size=10)
-        args = FakeArgs(compareVersions='size', delete=True)
-        actions = [
-            'b2_upload(/dir/a.txt, folder/a.txt, 200)',
-            'b2_delete(folder/a.txt, id_a_100, (old version))'
-        ]
-        self._check_local_to_b2(src_file, dst_file, args, actions)
-
-    # helper methods
-
-    def _check_local_to_b2(self, src_file, dst_file, args, expected_actions):
-        self._check_one_file('local', src_file, 'b2', dst_file, args, expected_actions)
-
-    def _check_b2_to_local(self, src_file, dst_file, args, expected_actions):
-        self._check_one_file('b2', src_file, 'local', dst_file, args, expected_actions)
-
-    def _check_one_file(self, src_type, src_file, dst_type, dst_file, args, expected_actions):
-        """
-        Checks the actions generated for one file.  The file may or may not
-        exist at the source, and may or may not exist at the destination.
-        Passing in None means that the file does not exist.
-
-        The source and destination files may have multiple versions.
-        """
-        src_folder = FakeFolder(src_type, [src_file] if src_file else [])
-        dst_folder = FakeFolder(dst_type, [dst_file] if dst_file else [])
-        actions = list(make_folder_sync_actions(src_folder, dst_folder, args, TODAY, self.reporter))
-        action_strs = [str(a) for a in actions]
-        if expected_actions != action_strs:
-            print('Expected:')
-            for a in expected_actions:
-                print('   ', a)
-            print('Actual:')
-            for a in action_strs:
-                print('   ', a)
-        self.assertEqual(expected_actions, [str(a) for a in actions])
-
-
 class TestBoundedQueueExecutor(TestBase):
     def test_run_more_than_queue_size(self):
         """
diff --git a/test/unit/v1/test_sync.py b/test/unit/v1/test_sync.py
index b7a8557b7..a65cf5236 100644
--- a/test/unit/v1/test_sync.py
+++ b/test/unit/v1/test_sync.py
@@ -15,13 +15,11 @@
 import threading
 import time
 import unittest
-from enum import Enum
 from nose import SkipTest
 from unittest.mock import MagicMock
 
 from .test_base import TestBase
 
-from .deps_exception import DestFileNewer
 from .deps_exception import UnSyncableFilename
 from .deps import FileVersionInfo
 from .deps import AbstractFolder, B2Folder, LocalFolder
@@ -32,7 +30,6 @@
 from .deps import parse_sync_folder
 from .deps import TempDir
 from .deps import Synchronizer
-from .deps import InvalidArgument
 
 DAY = 86400000  # milliseconds
 TODAY = DAY * 100  # an arbitrary reference time for testing
@@ -748,39 +745,6 @@ def get_synchronizer(self, policies_manager=DEFAULT_SCAN_MANAGER):
         )
 
 
-def b2_file(name, mod_times, size=10):
-    """
-    Makes a File object for a b2 file, with one FileVersion for
-    each modification time given in mod_times.
-
-    Positive modification times are uploads, and negative modification
-    times are hides.  It's a hack, but it works.
-
-        b2_file('a.txt', [300, -200, 100])
-
-    Is the same as:
-
-        File(
-            'a.txt',
-            [
-               FileVersion('id_a_300', 'a.txt', 300, 'upload'),
-               FileVersion('id_a_200', 'a.txt', 200, 'hide'),
-               FileVersion('id_a_100', 'a.txt', 100, 'upload')
-            ]
-        )
-    """
-    versions = [
-        FileVersion(
-            'id_%s_%d' % (name[0], abs(mod_time)),
-            'folder/' + name,
-            abs(mod_time),
-            'upload' if 0 < mod_time else 'hide',
-            size,
-        ) for mod_time in mod_times
-    ]  # yapf disable
-    return File(name, versions)
-
-
 def local_file(name, mod_times, size=10):
     """
     Makes a File object for a b2 file, with one FileVersion for
@@ -857,502 +821,6 @@ def test_file_exclusions_inclusions_with_delete(self):
         self._check_folder_sync(expected_actions, fakeargs)
 
 
-class IllegalEnum(Enum):
-    ILLEGAL = 5100
-
-
-class TestMakeSyncActions(TestSync):
-    def test_illegal_local_to_local(self):
-        local_folder = FakeFolder('local', [])
-        with self.assertRaises(NotImplementedError):
-            fakeargs = FakeArgs()
-            syncronizer = fakeargs.get_synchronizer()
-            list(syncronizer.make_folder_sync_actions(local_folder, local_folder, 0, self.reporter))
-
-    def test_illegal_newer_file_mode(self):
-        with self.assertRaises(InvalidArgument):
-            self._check_local_to_b2(
-                None,
-                None,
-                FakeArgs(newer_file_mode=IllegalEnum.ILLEGAL),
-                [],
-            )
-
-    def test_illegal_delete_and_keep_days(self):
-        with self.assertRaises(InvalidArgument):
-            self._check_local_to_b2(
-                None,
-                None,
-                FakeArgs(keep_days_or_delete=IllegalEnum.ILLEGAL),
-                [],
-            )
-
-    # src: absent, dst: absent
-
-    def test_empty_b2(self):
-        self._check_local_to_b2(None, None, FakeArgs(), [])
-
-    def test_empty_local(self):
-        self._check_b2_to_local(None, None, FakeArgs(), [])
-
-    # src: present, dst: absent
-
-    def test_not_there_b2(self):
-        src_file = local_file('a.txt', [100])
-        self._check_local_to_b2(
-            src_file, None, FakeArgs(), ['b2_upload(/dir/a.txt, folder/a.txt, 100)']
-        )
-
-    def test_dir_not_there_b2_keepdays(self):  # reproduces issue 220
-        src_file = b2_file('directory/a.txt', [100])
-        actions = ['b2_upload(/dir/directory/a.txt, folder/directory/a.txt, 100)']
-        self._check_local_to_b2(
-            src_file,
-            None,
-            FakeArgs(keep_days_or_delete=KeepOrDeleteMode.KEEP_BEFORE_DELETE, keep_days=1),
-            actions,
-        )
-
-    def test_dir_not_there_b2_delete(self):  # reproduces issue 218
-        src_file = b2_file('directory/a.txt', [100])
-        actions = ['b2_upload(/dir/directory/a.txt, folder/directory/a.txt, 100)']
-        self._check_local_to_b2(
-            src_file,
-            None,
-            FakeArgs(keep_days_or_delete=KeepOrDeleteMode.DELETE),
-            actions,
-        )
-
-    def test_not_there_local(self):
-        src_file = b2_file('a.txt', [100])
-        actions = ['b2_download(folder/a.txt, id_a_100, /dir/a.txt, 100)']
-        self._check_b2_to_local(src_file, None, FakeArgs(), actions)
-
-    # src: absent, dst: present
-
-    def test_no_delete_b2(self):
-        dst_file = b2_file('a.txt', [100])
-        self._check_local_to_b2(None, dst_file, FakeArgs(), [])
-
-    def test_no_delete_local(self):
-        dst_file = local_file('a.txt', [100])
-        self._check_b2_to_local(None, dst_file, FakeArgs(), [])
-
-    def test_delete_b2(self):
-        dst_file = b2_file('a.txt', [100])
-        actions = ['b2_delete(folder/a.txt, id_a_100, )']
-        self._check_local_to_b2(
-            None,
-            dst_file,
-            FakeArgs(keep_days_or_delete=KeepOrDeleteMode.DELETE),
-            actions,
-        )
-
-    def test_delete_large_b2(self):
-        dst_file = b2_file('a.txt', [100])
-        actions = ['b2_delete(folder/a.txt, id_a_100, )']
-        self._check_local_to_b2(
-            None,
-            dst_file,
-            FakeArgs(keep_days_or_delete=KeepOrDeleteMode.DELETE),
-            actions,
-        )
-
-    def test_delete_b2_multiple_versions(self):
-        dst_file = b2_file('a.txt', [100, 200])
-        actions = [
-            'b2_delete(folder/a.txt, id_a_100, )',
-            'b2_delete(folder/a.txt, id_a_200, (old version))'
-        ]
-        self._check_local_to_b2(
-            None,
-            dst_file,
-            FakeArgs(keep_days_or_delete=KeepOrDeleteMode.DELETE),
-            actions,
-        )
-
-    def test_delete_hide_b2_multiple_versions(self):
-        dst_file = b2_file('a.txt', [TODAY, TODAY - 2 * DAY, TODAY - 4 * DAY])
-        actions = [
-            'b2_hide(folder/a.txt)', 'b2_delete(folder/a.txt, id_a_8294400000, (old version))'
-        ]
-        self._check_local_to_b2(
-            None,
-            dst_file,
-            FakeArgs(keep_days_or_delete=KeepOrDeleteMode.KEEP_BEFORE_DELETE, keep_days=1),
-            actions,
-        )
-
-    def test_delete_hide_b2_multiple_versions_old(self):
-        dst_file = b2_file('a.txt', [TODAY - 1 * DAY, TODAY - 3 * DAY, TODAY - 5 * DAY])
-        actions = [
-            'b2_hide(folder/a.txt)', 'b2_delete(folder/a.txt, id_a_8208000000, (old version))'
-        ]
-        self._check_local_to_b2(
-            None,
-            dst_file,
-            FakeArgs(keep_days_or_delete=KeepOrDeleteMode.KEEP_BEFORE_DELETE, keep_days=2),
-            actions,
-        )
-
-    def test_already_hidden_multiple_versions_keep(self):
-        dst_file = b2_file('a.txt', [-TODAY, TODAY - 2 * DAY, TODAY - 4 * DAY])
-        self._check_local_to_b2(None, dst_file, FakeArgs(), [])
-
-    def test_already_hidden_multiple_versions_keep_days(self):
-        dst_file = b2_file('a.txt', [-TODAY, TODAY - 2 * DAY, TODAY - 4 * DAY])
-        actions = ['b2_delete(folder/a.txt, id_a_8294400000, (old version))']
-        self._check_local_to_b2(
-            None,
-            dst_file,
-            FakeArgs(keep_days_or_delete=KeepOrDeleteMode.KEEP_BEFORE_DELETE, keep_days=1),
-            actions,
-        )
-
-    def test_already_hidden_multiple_versions_keep_days_one_old(self):
-        # The 6-day-old file should be preserved, because it was visible
-        # 5 days ago.
-        dst_file = b2_file('a.txt', [-(TODAY - 2 * DAY), TODAY - 4 * DAY, TODAY - 6 * DAY])
-        actions = []
-        self._check_local_to_b2(
-            None,
-            dst_file,
-            FakeArgs(keep_days_or_delete=KeepOrDeleteMode.KEEP_BEFORE_DELETE, keep_days=5),
-            actions,
-        )
-
-    def test_already_hidden_multiple_versions_keep_days_two_old(self):
-        dst_file = b2_file('a.txt', [-(TODAY - 2 * DAY), TODAY - 4 * DAY, TODAY - 6 * DAY])
-        actions = ['b2_delete(folder/a.txt, id_a_8121600000, (old version))']
-        self._check_local_to_b2(
-            None,
-            dst_file,
-            FakeArgs(keep_days_or_delete=KeepOrDeleteMode.KEEP_BEFORE_DELETE, keep_days=2),
-            actions,
-        )
-
-    def test_already_hidden_multiple_versions_keep_days_delete_hide_marker(self):
-        dst_file = b2_file('a.txt', [-(TODAY - 2 * DAY), TODAY - 4 * DAY, TODAY - 6 * DAY])
-        actions = [
-            'b2_delete(folder/a.txt, id_a_8467200000, (hide marker))',
-            'b2_delete(folder/a.txt, id_a_8294400000, (old version))',
-            'b2_delete(folder/a.txt, id_a_8121600000, (old version))'
-        ]
-        self._check_local_to_b2(
-            None,
-            dst_file,
-            FakeArgs(keep_days_or_delete=KeepOrDeleteMode.KEEP_BEFORE_DELETE, keep_days=1),
-            actions,
-        )
-
-    def test_already_hidden_multiple_versions_keep_days_old_delete(self):
-        dst_file = b2_file('a.txt', [-TODAY + 2 * DAY, TODAY - 4 * DAY])
-        actions = [
-            'b2_delete(folder/a.txt, id_a_8467200000, (hide marker))',
-            'b2_delete(folder/a.txt, id_a_8294400000, (old version))'
-        ]
-        self._check_local_to_b2(
-            None,
-            dst_file,
-            FakeArgs(keep_days_or_delete=KeepOrDeleteMode.KEEP_BEFORE_DELETE, keep_days=1),
-            actions,
-        )
-
-    def test_already_hidden_multiple_versions_delete(self):
-        dst_file = b2_file('a.txt', [-TODAY, TODAY - 2 * DAY, TODAY - 4 * DAY])
-        actions = [
-            'b2_delete(folder/a.txt, id_a_8640000000, (hide marker))',
-            'b2_delete(folder/a.txt, id_a_8467200000, (old version))',
-            'b2_delete(folder/a.txt, id_a_8294400000, (old version))'
-        ]
-        self._check_local_to_b2(
-            None,
-            dst_file,
-            FakeArgs(keep_days_or_delete=KeepOrDeleteMode.DELETE),
-            actions,
-        )
-
-    def test_delete_local(self):
-        dst_file = local_file('a.txt', [100])
-        self._check_b2_to_local(
-            None,
-            dst_file,
-            FakeArgs(keep_days_or_delete=KeepOrDeleteMode.DELETE),
-            ['local_delete(/dir/a.txt)'],
-        )
-
-    # src same as dst
-
-    def test_same_b2(self):
-        src_file = local_file('a.txt', [100])
-        dst_file = b2_file('a.txt', [100])
-        self._check_local_to_b2(src_file, dst_file, FakeArgs(), [])
-
-    def test_same_local(self):
-        src_file = b2_file('a.txt', [100])
-        dst_file = local_file('a.txt', [100])
-        self._check_b2_to_local(src_file, dst_file, FakeArgs(), [])
-
-    def test_same_leave_old_versions(self):
-        src_file = local_file('a.txt', [TODAY])
-        dst_file = b2_file('a.txt', [TODAY, TODAY - 3 * DAY])
-        self._check_local_to_b2(src_file, dst_file, FakeArgs(), [])
-
-    def test_same_clean_old_versions(self):
-        src_file = local_file('a.txt', [TODAY - 3 * DAY])
-        dst_file = b2_file('a.txt', [TODAY - 3 * DAY, TODAY - 4 * DAY])
-        actions = ['b2_delete(folder/a.txt, id_a_8294400000, (old version))']
-        self._check_local_to_b2(
-            src_file,
-            dst_file,
-            FakeArgs(keep_days_or_delete=KeepOrDeleteMode.KEEP_BEFORE_DELETE, keep_days=1),
-            actions,
-        )
-
-    def test_keep_days_no_change_with_old_file(self):
-        src_file = local_file('a.txt', [TODAY - 3 * DAY])
-        dst_file = b2_file('a.txt', [TODAY - 3 * DAY])
-        self._check_local_to_b2(
-            src_file,
-            dst_file,
-            FakeArgs(keep_days_or_delete=KeepOrDeleteMode.KEEP_BEFORE_DELETE, keep_days=1),
-            [],
-        )
-
-    def test_same_delete_old_versions(self):
-        src_file = local_file('a.txt', [TODAY])
-        dst_file = b2_file('a.txt', [TODAY, TODAY - 3 * DAY])
-        actions = ['b2_delete(folder/a.txt, id_a_8380800000, (old version))']
-        self._check_local_to_b2(
-            src_file,
-            dst_file,
-            FakeArgs(keep_days_or_delete=KeepOrDeleteMode.DELETE),
-            actions,
-        )
-
-    # src newer than dst
-
-    def test_newer_b2(self):
-        src_file = local_file('a.txt', [200])
-        dst_file = b2_file('a.txt', [100])
-        actions = ['b2_upload(/dir/a.txt, folder/a.txt, 200)']
-        self._check_local_to_b2(src_file, dst_file, FakeArgs(), actions)
-
-    def test_newer_b2_clean_old_versions(self):
-        src_file = local_file('a.txt', [TODAY])
-        dst_file = b2_file('a.txt', [TODAY - 1 * DAY, TODAY - 3 * DAY, TODAY - 5 * DAY])
-        actions = [
-            'b2_upload(/dir/a.txt, folder/a.txt, 8640000000)',
-            'b2_delete(folder/a.txt, id_a_8208000000, (old version))'
-        ]
-        self._check_local_to_b2(
-            src_file,
-            dst_file,
-            FakeArgs(keep_days_or_delete=KeepOrDeleteMode.KEEP_BEFORE_DELETE, keep_days=2),
-            actions,
-        )
-
-    def test_newer_b2_delete_old_versions(self):
-        src_file = local_file('a.txt', [TODAY])
-        dst_file = b2_file('a.txt', [TODAY - 1 * DAY, TODAY - 3 * DAY])
-        actions = [
-            'b2_upload(/dir/a.txt, folder/a.txt, 8640000000)',
-            'b2_delete(folder/a.txt, id_a_8553600000, (old version))',
-            'b2_delete(folder/a.txt, id_a_8380800000, (old version))'
-        ]  # yapf disable
-        self._check_local_to_b2(
-            src_file,
-            dst_file,
-            FakeArgs(keep_days_or_delete=KeepOrDeleteMode.DELETE),
-            actions,
-        )
-
-    def test_newer_local(self):
-        src_file = b2_file('a.txt', [200])
-        dst_file = local_file('a.txt', [100])
-        actions = ['b2_download(folder/a.txt, id_a_200, /dir/a.txt, 200)']
-        self._check_b2_to_local(
-            src_file,
-            dst_file,
-            FakeArgs(keep_days_or_delete=KeepOrDeleteMode.DELETE),
-            actions,
-        )
-
-    # src older than dst
-
-    def test_older_b2(self):
-        src_file = local_file('a.txt', [100])
-        dst_file = b2_file('a.txt', [200])
-        try:
-            self._check_local_to_b2(src_file, dst_file, FakeArgs(), [])
-            self.fail('should have raised DestFileNewer')
-        except DestFileNewer as e:
-            self.assertEqual(
-                'source file is older than destination: local://a.txt with a time of 100 cannot be synced to b2://a.txt with a time of 200, unless a valid newer_file_mode is provided',
-                str(e)
-            )
-
-    def test_older_b2_skip(self):
-        src_file = local_file('a.txt', [100])
-        dst_file = b2_file('a.txt', [200])
-        self._check_local_to_b2(
-            src_file,
-            dst_file,
-            FakeArgs(newer_file_mode=NewerFileSyncMode.SKIP),
-            [],
-        )
-
-    def test_older_b2_replace(self):
-        src_file = local_file('a.txt', [100])
-        dst_file = b2_file('a.txt', [200])
-        actions = ['b2_upload(/dir/a.txt, folder/a.txt, 100)']
-        self._check_local_to_b2(
-            src_file,
-            dst_file,
-            FakeArgs(newer_file_mode=NewerFileSyncMode.REPLACE),
-            actions,
-        )
-
-    def test_older_b2_replace_delete(self):
-        src_file = local_file('a.txt', [100])
-        dst_file = b2_file('a.txt', [200])
-        args = FakeArgs(
-            newer_file_mode=NewerFileSyncMode.REPLACE,
-            keep_days_or_delete=KeepOrDeleteMode.DELETE,
-        )
-        actions = [
-            'b2_upload(/dir/a.txt, folder/a.txt, 100)',
-            'b2_delete(folder/a.txt, id_a_200, (old version))'
-        ]
-        self._check_local_to_b2(src_file, dst_file, args, actions)
-
-    def test_older_local(self):
-        src_file = b2_file('directory/a.txt', [100])
-        dst_file = local_file('directory/a.txt', [200])
-        try:
-            self._check_b2_to_local(src_file, dst_file, FakeArgs(), [])
-            self.fail('should have raised DestFileNewer')
-        except DestFileNewer as e:
-            self.assertEqual(
-                'source file is older than destination: b2://directory/a.txt with a time of 100 cannot be synced to local://directory/a.txt with a time of 200, unless a valid newer_file_mode is provided',
-                str(e)
-            )
-
-    def test_older_local_skip(self):
-        src_file = b2_file('a.txt', [100])
-        dst_file = local_file('a.txt', [200])
-        self._check_b2_to_local(
-            src_file,
-            dst_file,
-            FakeArgs(newer_file_mode=NewerFileSyncMode.SKIP),
-            [],
-        )
-
-    def test_older_local_replace(self):
-        src_file = b2_file('a.txt', [100])
-        dst_file = local_file('a.txt', [200])
-        actions = ['b2_download(folder/a.txt, id_a_100, /dir/a.txt, 100)']
-        self._check_b2_to_local(
-            src_file,
-            dst_file,
-            FakeArgs(newer_file_mode=NewerFileSyncMode.REPLACE),
-            actions,
-        )
-
-    # compareVersions option
-
-    def test_compare_b2_none_newer(self):
-        src_file = local_file('a.txt', [200])
-        dst_file = b2_file('a.txt', [100])
-        self._check_local_to_b2(
-            src_file,
-            dst_file,
-            FakeArgs(compare_version_mode=CompareVersionMode.NONE),
-            [],
-        )
-
-    def test_compare_b2_none_older(self):
-        src_file = local_file('a.txt', [100])
-        dst_file = b2_file('a.txt', [200])
-        self._check_local_to_b2(
-            src_file,
-            dst_file,
-            FakeArgs(compare_version_mode=CompareVersionMode.NONE),
-            [],
-        )
-
-    def test_compare_b2_size_equal(self):
-        src_file = local_file('a.txt', [200], size=10)
-        dst_file = b2_file('a.txt', [100], size=10)
-        self._check_local_to_b2(
-            src_file,
-            dst_file,
-            FakeArgs(compare_version_mode=CompareVersionMode.SIZE),
-            [],
-        )
-
-    def test_compare_b2_size_not_equal(self):
-        src_file = local_file('a.txt', [200], size=11)
-        dst_file = b2_file('a.txt', [100], size=10)
-        actions = ['b2_upload(/dir/a.txt, folder/a.txt, 200)']
-        self._check_local_to_b2(
-            src_file,
-            dst_file,
-            FakeArgs(compare_version_mode=CompareVersionMode.SIZE),
-            actions,
-        )
-
-    def test_compare_b2_size_not_equal_delete(self):
-        src_file = local_file('a.txt', [200], size=11)
-        dst_file = b2_file('a.txt', [100], size=10)
-        args = FakeArgs(
-            compare_version_mode=CompareVersionMode.SIZE,
-            keep_days_or_delete=KeepOrDeleteMode.DELETE,
-        )
-        actions = [
-            'b2_upload(/dir/a.txt, folder/a.txt, 200)',
-            'b2_delete(folder/a.txt, id_a_100, (old version))'
-        ]
-        self._check_local_to_b2(src_file, dst_file, args, actions)
-
-    # helper methods
-
-    def _check_local_to_b2(self, src_file, dst_file, fakeargs, expected_actions):
-        self._check_one_file('local', src_file, 'b2', dst_file, fakeargs, expected_actions)
-
-    def _check_b2_to_local(self, src_file, dst_file, fakeargs, expected_actions):
-        self._check_one_file('b2', src_file, 'local', dst_file, fakeargs, expected_actions)
-
-    def _check_one_file(self, src_type, src_file, dst_type, dst_file, fakeargs, expected_actions):
-        """
-        Checks the actions generated for one file.  The file may or may not
-        exist at the source, and may or may not exist at the destination.
-        Passing in None means that the file does not exist.
-
-        The source and destination files may have multiple versions.
-        """
-        src_folder = FakeFolder(src_type, [src_file] if src_file else [])
-        dst_folder = FakeFolder(dst_type, [dst_file] if dst_file else [])
-        synchronizer = fakeargs.get_synchronizer()
-        actions = list(
-            synchronizer.make_folder_sync_actions(
-                src_folder,
-                dst_folder,
-                TODAY,
-                self.reporter,
-            )
-        )
-        action_strs = [str(a) for a in actions]
-        if expected_actions != action_strs:
-            print('Expected:')
-            for a in expected_actions:
-                print('   ', a)
-            print('Actual:')
-            for a in action_strs:
-                print('   ', a)
-        self.assertEqual(expected_actions, [str(a) for a in actions])
-
-
 class TestBoundedQueueExecutor(TestBase):
     def test_run_more_than_queue_size(self):
         """

From 3eb5189746871066ea33039173db1c8787698fdc Mon Sep 17 00:00:00 2001
From: Maciej Lech <maciej.lech@reef.pl>
Date: Sat, 12 Sep 2020 11:52:27 +0200
Subject: [PATCH 03/15] Improve unit testing

---
 test/unit/apiver.py              | 14 ------
 test/unit/conftest.py            | 16 +++----
 test/unit/sync/fixtures.py       | 27 ++++++------
 test/unit/sync/test_exception.py |  2 +-
 test/unit/sync/test_sync.py      | 73 ++++++++++++++++----------------
 5 files changed, 57 insertions(+), 75 deletions(-)
 delete mode 100644 test/unit/apiver.py

diff --git a/test/unit/apiver.py b/test/unit/apiver.py
deleted file mode 100644
index 465d3bfd3..000000000
--- a/test/unit/apiver.py
+++ /dev/null
@@ -1,14 +0,0 @@
-######################################################################
-#
-# File: test/unit/apiver.py
-#
-# Copyright 2020 Backblaze Inc. All Rights Reserved.
-#
-# License https://www.backblaze.com/using_b2_code.html
-#
-######################################################################
-
-import pytest
-
-# noinspection PyUnresolvedReferences
-apiver, apiver_exception = pytest.get_apiver_modules()
diff --git a/test/unit/conftest.py b/test/unit/conftest.py
index 07f572da7..62c13b534 100644
--- a/test/unit/conftest.py
+++ b/test/unit/conftest.py
@@ -1,6 +1,6 @@
 ######################################################################
 #
-# File: test/unit_new/conftest.py
+# File: test/unit/conftest.py
 #
 # Copyright 2020 Backblaze Inc. All Rights Reserved.
 #
@@ -8,20 +8,14 @@
 #
 ######################################################################
 
-import importlib
-from functools import partial
+import sys
+from pathlib import Path
 
 import pytest
 
 pytest.register_assert_rewrite('test.unit')
 
 
-def get_apiver_modules(version):
-    return importlib.import_module('b2sdk.%s' % version), importlib.import_module(
-        'b2sdk.%s.exception' % version
-    )
-
-
 @pytest.hookimpl
 def pytest_addoption(parser):
     parser.addoption(
@@ -34,7 +28,7 @@ def pytest_addoption(parser):
 
 @pytest.hookimpl
 def pytest_configure(config):
-    pytest.get_apiver_modules = partial(get_apiver_modules, config.getoption('--api'))
+    sys.path.insert(0, str(Path(__file__).parent / config.getoption('--api')))
 
 
 @pytest.hookimpl
@@ -54,5 +48,5 @@ def pytest_ignore_collect(path, config):
 
 
 @pytest.fixture(scope='session')
-def b2sdk_apiver(request):
+def apiver(request):
     return request.config.getoption('--api')
diff --git a/test/unit/sync/fixtures.py b/test/unit/sync/fixtures.py
index ac8c072cf..786648d42 100644
--- a/test/unit/sync/fixtures.py
+++ b/test/unit/sync/fixtures.py
@@ -10,10 +10,12 @@
 
 import pytest
 
-from ..apiver import apiver
+from deps import AbstractFolder, File, FileVersion
+from deps import CompareVersionMode, NewerFileSyncMode, KeepOrDeleteMode
+from deps import DEFAULT_SCAN_MANAGER, Synchronizer
 
 
-class FakeFolder(apiver.AbstractFolder):
+class FakeFolder(AbstractFolder):
     def __init__(self, f_type, files=None):
         if files is None:
             files = []
@@ -21,7 +23,7 @@ def __init__(self, f_type, files=None):
         self.f_type = f_type
         self.files = files
 
-    def all_files(self, reporter, policies_manager=apiver.DEFAULT_SCAN_MANAGER):
+    def all_files(self, reporter, policies_manager=DEFAULT_SCAN_MANAGER):
         for single_file in self.files:
             if single_file.name.endswith('/'):
                 if policies_manager.should_exclude_directory(single_file.name):
@@ -50,10 +52,9 @@ def local_file(name, mod_times, size=10):
     each modification time given in mod_times.
     """
     versions = [
-        apiver.FileVersion('/dir/%s' % (name,), name, mod_time, 'upload', size)
-        for mod_time in mod_times
+        FileVersion('/dir/%s' % (name,), name, mod_time, 'upload', size) for mod_time in mod_times
     ]
-    return apiver.File(name, versions)
+    return File(name, versions)
 
 
 def b2_file(name, mod_times, size=10):
@@ -78,7 +79,7 @@ def b2_file(name, mod_times, size=10):
         )
     """
     versions = [
-        apiver.FileVersion(
+        FileVersion(
             'id_%s_%d' % (name[0], abs(mod_time)),
             'folder/' + name,
             abs(mod_time),
@@ -86,7 +87,7 @@ def b2_file(name, mod_times, size=10):
             size,
         ) for mod_time in mod_times
     ]  # yapf disable
-    return apiver.File(name, versions)
+    return File(name, versions)
 
 
 @pytest.fixture(scope='module')
@@ -108,16 +109,16 @@ def get_files():
 @pytest.fixture(scope='module')
 def synchronizer_factory():
     def get_synchronizer(
-        policies_manager=apiver.DEFAULT_SCAN_MANAGER,
+        policies_manager=DEFAULT_SCAN_MANAGER,
         dry_run=False,
         allow_empty_source=False,
-        newer_file_mode=apiver.NewerFileSyncMode.RAISE_ERROR,
-        keep_days_or_delete=apiver.KeepOrDeleteMode.NO_DELETE,
+        newer_file_mode=NewerFileSyncMode.RAISE_ERROR,
+        keep_days_or_delete=KeepOrDeleteMode.NO_DELETE,
         keep_days=None,
-        compare_version_mode=apiver.CompareVersionMode.MODTIME,
+        compare_version_mode=CompareVersionMode.MODTIME,
         compare_threshold=None,
     ):
-        return apiver.Synchronizer(
+        return Synchronizer(
             1,
             policies_manager=policies_manager,
             dry_run=dry_run,
diff --git a/test/unit/sync/test_exception.py b/test/unit/sync/test_exception.py
index 2d9a3d392..0d90765b9 100644
--- a/test/unit/sync/test_exception.py
+++ b/test/unit/sync/test_exception.py
@@ -8,7 +8,7 @@
 #
 ######################################################################
 
-from b2sdk.sync.exception import (
+from deps_exception import (
     EnvironmentEncodingError,
     InvalidArgument,
     IncompleteSync,
diff --git a/test/unit/sync/test_sync.py b/test/unit/sync/test_sync.py
index 34e7acffa..e52b3e9f3 100644
--- a/test/unit/sync/test_sync.py
+++ b/test/unit/sync/test_sync.py
@@ -11,8 +11,10 @@
 from enum import Enum
 from functools import partial
 
+from deps_exception import CommandError, DestFileNewer, InvalidArgument
+from deps import CompareVersionMode, KeepOrDeleteMode, NewerFileSyncMode
+
 from .fixtures import *
-from ..apiver import apiver, apiver_exception
 
 DAY = 86400000  # milliseconds
 TODAY = DAY * 100  # an arbitrary reference time for testing
@@ -60,13 +62,13 @@ def assert_folder_sync_actions(self, synchronizer, src_folder, dst_folder, expec
             'keep_days_or_delete',
         ]
     )
-    def test_illegal_args(self, synchronizer_factory, b2sdk_apiver, args):
+    def test_illegal_args(self, synchronizer_factory, apiver, args):
         exceptions = {
-            'v1': apiver_exception.InvalidArgument,
-            'v0': apiver_exception.CommandError,
+            'v1': InvalidArgument,
+            'v0': CommandError,
         }
 
-        with pytest.raises(exceptions[b2sdk_apiver]):
+        with pytest.raises(exceptions[apiver]):
             synchronizer_factory(**args)
 
     def test_illegal(self, synchronizer):
@@ -118,7 +120,7 @@ def test_dir_not_there_b2_keepdays(
         src = self.folder_factory(src_type, ('directory/a.txt', [100]))
         dst = self.b2_folder_factory()
         synchronizer = synchronizer_factory(
-            keep_days_or_delete=apiver.KeepOrDeleteMode.KEEP_BEFORE_DELETE, keep_days=1
+            keep_days_or_delete=KeepOrDeleteMode.KEEP_BEFORE_DELETE, keep_days=1
         )
         self.assert_folder_sync_actions(synchronizer, src, dst, expected)
 
@@ -134,7 +136,7 @@ def test_dir_not_there_b2_delete(
     ):  # reproduces issue 220
         src = self.folder_factory(src_type, ('directory/a.txt', [100]))
         dst = self.b2_folder_factory()
-        synchronizer = synchronizer_factory(keep_days_or_delete=apiver.KeepOrDeleteMode.DELETE)
+        synchronizer = synchronizer_factory(keep_days_or_delete=KeepOrDeleteMode.DELETE)
         self.assert_folder_sync_actions(synchronizer, src, dst, expected)
 
     # # src: absent, dst: present
@@ -161,7 +163,7 @@ def test_no_delete(self, synchronizer, src_type, dst_type):
         ],
     )
     def test_delete(self, synchronizer_factory, src_type, dst_type, expected):
-        synchronizer = synchronizer_factory(keep_days_or_delete=apiver.KeepOrDeleteMode.DELETE)
+        synchronizer = synchronizer_factory(keep_days_or_delete=KeepOrDeleteMode.DELETE)
         src = self.folder_factory(src_type)
         dst = self.folder_factory(dst_type, ('a.txt', [100]))
         self.assert_folder_sync_actions(synchronizer, src, dst, expected)
@@ -175,7 +177,7 @@ def test_delete(self, synchronizer_factory, src_type, dst_type, expected):
         ],
     )
     def test_delete_large(self, synchronizer_factory, src_type, dst_type, expected):
-        synchronizer = synchronizer_factory(keep_days_or_delete=apiver.KeepOrDeleteMode.DELETE)
+        synchronizer = synchronizer_factory(keep_days_or_delete=KeepOrDeleteMode.DELETE)
         src = self.folder_factory(src_type)
         dst = self.folder_factory(dst_type, ('a.txt', [100], 10737418240))
         self.assert_folder_sync_actions(synchronizer, src, dst, expected)
@@ -188,7 +190,7 @@ def test_delete_large(self, synchronizer_factory, src_type, dst_type, expected):
         ],
     )
     def test_delete_multiple_versions(self, synchronizer_factory, src_type):
-        synchronizer = synchronizer_factory(keep_days_or_delete=apiver.KeepOrDeleteMode.DELETE)
+        synchronizer = synchronizer_factory(keep_days_or_delete=KeepOrDeleteMode.DELETE)
         src = self.folder_factory(src_type)
         dst = self.b2_folder_factory(('a.txt', [100, 200]))
         expected = [
@@ -206,7 +208,7 @@ def test_delete_multiple_versions(self, synchronizer_factory, src_type):
     )
     def test_delete_hide_b2_multiple_versions(self, synchronizer_factory, src_type):
         synchronizer = synchronizer_factory(
-            keep_days_or_delete=apiver.KeepOrDeleteMode.KEEP_BEFORE_DELETE, keep_days=1
+            keep_days_or_delete=KeepOrDeleteMode.KEEP_BEFORE_DELETE, keep_days=1
         )
         src = self.folder_factory(src_type)
         dst = self.b2_folder_factory(('a.txt', [TODAY, TODAY - 2 * DAY, TODAY - 4 * DAY]))
@@ -224,7 +226,7 @@ def test_delete_hide_b2_multiple_versions(self, synchronizer_factory, src_type):
     )
     def test_delete_hide_b2_multiple_versions_old(self, synchronizer_factory, src_type):
         synchronizer = synchronizer_factory(
-            keep_days_or_delete=apiver.KeepOrDeleteMode.KEEP_BEFORE_DELETE, keep_days=2
+            keep_days_or_delete=KeepOrDeleteMode.KEEP_BEFORE_DELETE, keep_days=2
         )
         src = self.folder_factory(src_type)
         dst = self.b2_folder_factory(('a.txt', [TODAY - 1 * DAY, TODAY - 3 * DAY, TODAY - 5 * DAY]))
@@ -254,7 +256,7 @@ def test_already_hidden_multiple_versions_keep(self, synchronizer, src_type):
     )
     def test_already_hidden_multiple_versions_keep_days(self, synchronizer_factory, src_type):
         synchronizer = synchronizer_factory(
-            keep_days_or_delete=apiver.KeepOrDeleteMode.KEEP_BEFORE_DELETE, keep_days=1
+            keep_days_or_delete=KeepOrDeleteMode.KEEP_BEFORE_DELETE, keep_days=1
         )
         src = self.folder_factory(src_type)
         dst = self.b2_folder_factory(('a.txt', [-TODAY, TODAY - 2 * DAY, TODAY - 4 * DAY]))
@@ -272,7 +274,7 @@ def test_already_hidden_multiple_versions_keep_days_one_old(
         self, synchronizer_factory, src_type
     ):
         synchronizer = synchronizer_factory(
-            keep_days_or_delete=apiver.KeepOrDeleteMode.KEEP_BEFORE_DELETE, keep_days=5
+            keep_days_or_delete=KeepOrDeleteMode.KEEP_BEFORE_DELETE, keep_days=5
         )
         src = self.folder_factory(src_type)
         dst = self.b2_folder_factory(
@@ -291,7 +293,7 @@ def test_already_hidden_multiple_versions_keep_days_two_old(
         self, synchronizer_factory, src_type
     ):
         synchronizer = synchronizer_factory(
-            keep_days_or_delete=apiver.KeepOrDeleteMode.KEEP_BEFORE_DELETE, keep_days=2
+            keep_days_or_delete=KeepOrDeleteMode.KEEP_BEFORE_DELETE, keep_days=2
         )
         src = self.folder_factory(src_type)
         dst = self.b2_folder_factory(
@@ -311,7 +313,7 @@ def test_already_hidden_multiple_versions_keep_days_delete_hide_marker(
         self, synchronizer_factory, src_type
     ):
         synchronizer = synchronizer_factory(
-            keep_days_or_delete=apiver.KeepOrDeleteMode.KEEP_BEFORE_DELETE, keep_days=1
+            keep_days_or_delete=KeepOrDeleteMode.KEEP_BEFORE_DELETE, keep_days=1
         )
         src = self.folder_factory(src_type)
         dst = self.b2_folder_factory(
@@ -335,7 +337,7 @@ def test_already_hidden_multiple_versions_keep_days_old_delete(
         self, synchronizer_factory, src_type
     ):
         synchronizer = synchronizer_factory(
-            keep_days_or_delete=apiver.KeepOrDeleteMode.KEEP_BEFORE_DELETE, keep_days=1
+            keep_days_or_delete=KeepOrDeleteMode.KEEP_BEFORE_DELETE, keep_days=1
         )
         src = self.folder_factory(src_type)
         dst = self.b2_folder_factory(('a.txt', [-TODAY + 2 * DAY, TODAY - 4 * DAY]))
@@ -353,7 +355,7 @@ def test_already_hidden_multiple_versions_keep_days_old_delete(
         ],
     )
     def test_already_hidden_multiple_versions_delete(self, synchronizer_factory, src_type):
-        synchronizer = synchronizer_factory(keep_days_or_delete=apiver.KeepOrDeleteMode.DELETE)
+        synchronizer = synchronizer_factory(keep_days_or_delete=KeepOrDeleteMode.DELETE)
         src = self.folder_factory(src_type)
         dst = self.b2_folder_factory(('a.txt', [-TODAY, TODAY - 2 * DAY, TODAY - 4 * DAY]))
         expected = [
@@ -399,7 +401,7 @@ def test_same_leave_old_version(self, synchronizer, src_type):
     )
     def test_same_clean_old_version(self, synchronizer_factory, src_type):
         synchronizer = synchronizer_factory(
-            keep_days_or_delete=apiver.KeepOrDeleteMode.KEEP_BEFORE_DELETE, keep_days=1
+            keep_days_or_delete=KeepOrDeleteMode.KEEP_BEFORE_DELETE, keep_days=1
         )
         src = self.folder_factory(src_type, ('a.txt', [TODAY - 3 * DAY]))
         dst = self.b2_folder_factory(('a.txt', [TODAY - 3 * DAY, TODAY - 4 * DAY]))
@@ -415,7 +417,7 @@ def test_same_clean_old_version(self, synchronizer_factory, src_type):
     )
     def test_keep_days_no_change_with_old_file(self, synchronizer_factory, src_type):
         synchronizer = synchronizer_factory(
-            keep_days_or_delete=apiver.KeepOrDeleteMode.KEEP_BEFORE_DELETE, keep_days=1
+            keep_days_or_delete=KeepOrDeleteMode.KEEP_BEFORE_DELETE, keep_days=1
         )
         src = self.folder_factory(src_type, ('a.txt', [TODAY - 3 * DAY]))
         dst = self.b2_folder_factory(('a.txt', [TODAY - 3 * DAY]))
@@ -429,7 +431,7 @@ def test_keep_days_no_change_with_old_file(self, synchronizer_factory, src_type)
         ],
     )
     def test_same_delete_old_versions(self, synchronizer_factory, src_type):
-        synchronizer = synchronizer_factory(keep_days_or_delete=apiver.KeepOrDeleteMode.DELETE)
+        synchronizer = synchronizer_factory(keep_days_or_delete=KeepOrDeleteMode.DELETE)
         src = self.folder_factory(src_type, ('a.txt', [TODAY]))
         dst = self.b2_folder_factory(('a.txt', [TODAY, TODAY - 3 * DAY]))
         expected = ['b2_delete(folder/a.txt, id_a_8380800000, (old version))']
@@ -469,7 +471,7 @@ def test_never(self, synchronizer, src_type, dst_type, expected):
     )
     def test_newer_clean_old_versions(self, synchronizer_factory, src_type, expected):
         synchronizer = synchronizer_factory(
-            keep_days_or_delete=apiver.KeepOrDeleteMode.KEEP_BEFORE_DELETE, keep_days=2
+            keep_days_or_delete=KeepOrDeleteMode.KEEP_BEFORE_DELETE, keep_days=2
         )
         src = self.folder_factory(src_type, ('a.txt', [TODAY]))
         dst = self.b2_folder_factory(('a.txt', [TODAY - 1 * DAY, TODAY - 3 * DAY, TODAY - 5 * DAY]))
@@ -495,7 +497,7 @@ def test_newer_clean_old_versions(self, synchronizer_factory, src_type, expected
         ],
     )
     def test_newer_delete_old_versions(self, synchronizer_factory, src_type, expected):
-        synchronizer = synchronizer_factory(keep_days_or_delete=apiver.KeepOrDeleteMode.DELETE)
+        synchronizer = synchronizer_factory(keep_days_or_delete=KeepOrDeleteMode.DELETE)
         src = self.folder_factory(src_type, ('a.txt', [TODAY]))
         dst = self.b2_folder_factory(('a.txt', [TODAY - 1 * DAY, TODAY - 3 * DAY]))
         self.assert_folder_sync_actions(synchronizer, src, dst, expected)
@@ -510,10 +512,10 @@ def test_newer_delete_old_versions(self, synchronizer_factory, src_type, expecte
             ('b2', 'b2', ['b2_copy(folder/a.txt, id_a_200, folder/a.txt, 200)']),
         ],
     )
-    def test_older(self, synchronizer, b2sdk_apiver, src_type, dst_type, expected):
+    def test_older(self, synchronizer, apiver, src_type, dst_type, expected):
         src = self.folder_factory(src_type, ('a.txt', [100]))
         dst = self.folder_factory(dst_type, ('a.txt', [200]))
-        with pytest.raises(apiver_exception.DestFileNewer) as excinfo:
+        with pytest.raises(DestFileNewer) as excinfo:
             self.assert_folder_sync_actions(synchronizer, src, dst, expected)
         messages = {
             'v1': 'source file is older than destination: %s://a.txt with a time of 100 '
@@ -524,7 +526,7 @@ def test_older(self, synchronizer, b2sdk_apiver, src_type, dst_type, expected):
                   'unless --skipNewer or --replaceNewer is provided',
         }  # yapf: disable
 
-        assert str(excinfo.value) == messages[b2sdk_apiver] % (src_type, dst_type)
+        assert str(excinfo.value) == messages[apiver] % (src_type, dst_type)
 
     @pytest.mark.parametrize(
         'src_type,dst_type',
@@ -535,7 +537,7 @@ def test_older(self, synchronizer, b2sdk_apiver, src_type, dst_type, expected):
         ],
     )
     def test_older_skip(self, synchronizer_factory, src_type, dst_type):
-        synchronizer = synchronizer_factory(newer_file_mode=apiver.NewerFileSyncMode.SKIP)
+        synchronizer = synchronizer_factory(newer_file_mode=NewerFileSyncMode.SKIP)
         src = self.folder_factory(src_type, ('a.txt', [100]))
         dst = self.folder_factory(dst_type, ('a.txt', [200]))
         self.assert_folder_sync_actions(synchronizer, src, dst, [])
@@ -549,7 +551,7 @@ def test_older_skip(self, synchronizer_factory, src_type, dst_type):
         ],
     )
     def test_older_replace(self, synchronizer_factory, src_type, dst_type, expected):
-        synchronizer = synchronizer_factory(newer_file_mode=apiver.NewerFileSyncMode.REPLACE)
+        synchronizer = synchronizer_factory(newer_file_mode=NewerFileSyncMode.REPLACE)
         src = self.folder_factory(src_type, ('a.txt', [100]))
         dst = self.folder_factory(dst_type, ('a.txt', [200]))
         self.assert_folder_sync_actions(synchronizer, src, dst, expected)
@@ -573,8 +575,7 @@ def test_older_replace(self, synchronizer_factory, src_type, dst_type, expected)
     )
     def test_older_replace_delete(self, synchronizer_factory, src_type, expected):
         synchronizer = synchronizer_factory(
-            newer_file_mode=apiver.NewerFileSyncMode.REPLACE,
-            keep_days_or_delete=apiver.KeepOrDeleteMode.DELETE
+            newer_file_mode=NewerFileSyncMode.REPLACE, keep_days_or_delete=KeepOrDeleteMode.DELETE
         )
         src = self.folder_factory(src_type, ('a.txt', [100]))
         dst = self.b2_folder_factory(('a.txt', [200]))
@@ -591,7 +592,7 @@ def test_older_replace_delete(self, synchronizer_factory, src_type, expected):
         ],
     )
     def test_compare_none_newer(self, synchronizer_factory, src_type, dst_type):
-        synchronizer = synchronizer_factory(compare_version_mode=apiver.CompareVersionMode.NONE)
+        synchronizer = synchronizer_factory(compare_version_mode=CompareVersionMode.NONE)
         src = self.folder_factory(src_type, ('a.txt', [200]))
         dst = self.folder_factory(dst_type, ('a.txt', [100]))
         self.assert_folder_sync_actions(synchronizer, src, dst, [])
@@ -605,7 +606,7 @@ def test_compare_none_newer(self, synchronizer_factory, src_type, dst_type):
         ],
     )
     def test_compare_none_older(self, synchronizer_factory, src_type, dst_type):
-        synchronizer = synchronizer_factory(compare_version_mode=apiver.CompareVersionMode.NONE)
+        synchronizer = synchronizer_factory(compare_version_mode=CompareVersionMode.NONE)
         src = self.folder_factory(src_type, ('a.txt', [100]))
         dst = self.folder_factory(dst_type, ('a.txt', [200]))
         self.assert_folder_sync_actions(synchronizer, src, dst, [])
@@ -619,7 +620,7 @@ def test_compare_none_older(self, synchronizer_factory, src_type, dst_type):
         ],
     )
     def test_compare_size_equal(self, synchronizer_factory, src_type, dst_type):
-        synchronizer = synchronizer_factory(compare_version_mode=apiver.CompareVersionMode.SIZE)
+        synchronizer = synchronizer_factory(compare_version_mode=CompareVersionMode.SIZE)
         src = self.folder_factory(src_type, ('a.txt', [200], 10))
         dst = self.folder_factory(dst_type, ('a.txt', [100], 10))
         self.assert_folder_sync_actions(synchronizer, src, dst, [])
@@ -633,7 +634,7 @@ def test_compare_size_equal(self, synchronizer_factory, src_type, dst_type):
         ],
     )
     def test_compare_size_not_equal(self, synchronizer_factory, src_type, dst_type, expected):
-        synchronizer = synchronizer_factory(compare_version_mode=apiver.CompareVersionMode.SIZE)
+        synchronizer = synchronizer_factory(compare_version_mode=CompareVersionMode.SIZE)
         src = self.folder_factory(src_type, ('a.txt', [200], 11))
         dst = self.folder_factory(dst_type, ('a.txt', [100], 10))
         self.assert_folder_sync_actions(synchronizer, src, dst, expected)
@@ -660,8 +661,8 @@ def test_compare_size_not_equal_delete(
         self, synchronizer_factory, src_type, dst_type, expected
     ):
         synchronizer = synchronizer_factory(
-            compare_version_mode=apiver.CompareVersionMode.SIZE,
-            keep_days_or_delete=apiver.KeepOrDeleteMode.DELETE
+            compare_version_mode=CompareVersionMode.SIZE,
+            keep_days_or_delete=KeepOrDeleteMode.DELETE
         )
         src = self.folder_factory(src_type, ('a.txt', [200], 11))
         dst = self.folder_factory(dst_type, ('a.txt', [100], 10))

From cabba13af57770270fb47ca0240bd5316087573b Mon Sep 17 00:00:00 2001
From: Maciej Lech <maciej.lech@reef.pl>
Date: Sat, 12 Sep 2020 18:49:47 +0200
Subject: [PATCH 04/15] Fix emerge planner

---
 b2sdk/transfer/emerge/planner/planner.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/b2sdk/transfer/emerge/planner/planner.py b/b2sdk/transfer/emerge/planner/planner.py
index 14ee49114..ce002376a 100644
--- a/b2sdk/transfer/emerge/planner/planner.py
+++ b/b2sdk/transfer/emerge/planner/planner.py
@@ -607,7 +607,7 @@ def get_plan_id(self):
             return None
 
         json_id = json.dumps([emerge_part.get_part_id() for emerge_part in self.emerge_parts])
-        return hashlib.sha1(json_id).hexdigest()
+        return hashlib.sha1(json_id.encode()).hexdigest()
 
 
 class StreamingEmergePlan(BaseEmergePlan):

From 8b5328d9ed9b19b9596610318f0dc36174dd5fa4 Mon Sep 17 00:00:00 2001
From: Maciej Lech <maciej.lech@reef.pl>
Date: Sun, 13 Sep 2020 16:58:38 +0200
Subject: [PATCH 05/15] Increase the timeout for server-side copy

---
 b2sdk/b2http.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/b2sdk/b2http.py b/b2sdk/b2http.py
index 40e8a4dd0..0de3eeffa 100644
--- a/b2sdk/b2http.py
+++ b/b2sdk/b2http.py
@@ -246,7 +246,7 @@ class B2Http(object):
     """
 
     # timeout for HTTP GET/POST requests
-    TIMEOUT = 130
+    TIMEOUT = 900  # 15 minutes as server-side copy can take time
 
     def __init__(self, requests_module=None, install_clock_skew_hook=True):
         """

From f089bd946ac99c22b5ba67382f14cd1c8fd1050c Mon Sep 17 00:00:00 2001
From: Maciej Lech <maciej.lech@reef.pl>
Date: Sun, 13 Sep 2020 18:01:21 +0200
Subject: [PATCH 06/15] Add small improvements in sync

---
 b2sdk/sync/sync.py | 6 +++---
 1 file changed, 3 insertions(+), 3 deletions(-)

diff --git a/b2sdk/sync/sync.py b/b2sdk/sync/sync.py
index 8f2e86025..f3b9a8559 100644
--- a/b2sdk/sync/sync.py
+++ b/b2sdk/sync/sync.py
@@ -200,14 +200,14 @@ def sync_folders(self, source_folder, dest_folder, now_millis, reporter):
         local_folder = None
         if source_type == 'local':
             local_folder = source_folder
-        if dest_folder.folder_type() == 'local':
+        if dest_type == 'local':
             local_folder = dest_folder
         if reporter and local_folder is not None:
             sync_executor.submit(count_files, local_folder, reporter)
 
         # Schedule each of the actions
         bucket = None
-        if source_folder.folder_type() == 'b2':
+        if source_type == 'b2':
             bucket = source_folder.bucket
         if dest_type == 'b2':
             bucket = dest_folder.bucket
@@ -267,7 +267,7 @@ def make_folder_sync_actions(
             elif dest_file is None:
                 logger.debug('determined that %s is not present on destination', source_file)
 
-            if source_folder.folder_type() == 'local':
+            if source_type == 'local':
                 if source_file is not None:
                     reporter.update_compare(1)
             else:

From dd3e6ece6fcaa94b9015a4ecbbd6b85c1fa7eca3 Mon Sep 17 00:00:00 2001
From: Maciej Lech <maciej.lech@reef.pl>
Date: Thu, 17 Sep 2020 12:55:06 +0200
Subject: [PATCH 07/15] Update unit tests after review

---
 test/unit/conftest.py                        |  2 +-
 test/unit/sync/fixtures.py                   |  6 +++---
 test/unit/sync/test_exception.py             |  2 +-
 test/unit/sync/test_sync.py                  |  3 +--
 test/unit/v0/apiver/__init__.py              | 12 ++++++++++++
 test/unit/v0/apiver/apiver_deps.py           | 11 +++++++++++
 test/unit/v0/apiver/apiver_deps_exception.py | 11 +++++++++++
 test/unit/v0/deps.py                         |  7 ++++++-
 test/unit/v0/deps_exception.py               |  7 ++++++-
 test/unit/v1/apiver/__init__.py              | 12 ++++++++++++
 test/unit/v1/apiver/apiver_deps.py           | 11 +++++++++++
 test/unit/v1/apiver/apiver_deps_exception.py | 11 +++++++++++
 test/unit/v1/deps.py                         |  7 ++++++-
 test/unit/v1/deps_exception.py               |  7 ++++++-
 14 files changed, 98 insertions(+), 11 deletions(-)
 create mode 100644 test/unit/v0/apiver/__init__.py
 create mode 100644 test/unit/v0/apiver/apiver_deps.py
 create mode 100644 test/unit/v0/apiver/apiver_deps_exception.py
 create mode 100644 test/unit/v1/apiver/__init__.py
 create mode 100644 test/unit/v1/apiver/apiver_deps.py
 create mode 100644 test/unit/v1/apiver/apiver_deps_exception.py

diff --git a/test/unit/conftest.py b/test/unit/conftest.py
index 62c13b534..b2326f5a2 100644
--- a/test/unit/conftest.py
+++ b/test/unit/conftest.py
@@ -28,7 +28,7 @@ def pytest_addoption(parser):
 
 @pytest.hookimpl
 def pytest_configure(config):
-    sys.path.insert(0, str(Path(__file__).parent / config.getoption('--api')))
+    sys.path.insert(0, str(Path(__file__).parent / config.getoption('--api') / 'apiver'))
 
 
 @pytest.hookimpl
diff --git a/test/unit/sync/fixtures.py b/test/unit/sync/fixtures.py
index 786648d42..2211e5c08 100644
--- a/test/unit/sync/fixtures.py
+++ b/test/unit/sync/fixtures.py
@@ -10,9 +10,9 @@
 
 import pytest
 
-from deps import AbstractFolder, File, FileVersion
-from deps import CompareVersionMode, NewerFileSyncMode, KeepOrDeleteMode
-from deps import DEFAULT_SCAN_MANAGER, Synchronizer
+from apiver_deps import AbstractFolder, File, FileVersion
+from apiver_deps import CompareVersionMode, NewerFileSyncMode, KeepOrDeleteMode
+from apiver_deps import DEFAULT_SCAN_MANAGER, Synchronizer
 
 
 class FakeFolder(AbstractFolder):
diff --git a/test/unit/sync/test_exception.py b/test/unit/sync/test_exception.py
index 0d90765b9..522845025 100644
--- a/test/unit/sync/test_exception.py
+++ b/test/unit/sync/test_exception.py
@@ -8,7 +8,7 @@
 #
 ######################################################################
 
-from deps_exception import (
+from apiver_deps_exception import (
     EnvironmentEncodingError,
     InvalidArgument,
     IncompleteSync,
diff --git a/test/unit/sync/test_sync.py b/test/unit/sync/test_sync.py
index e52b3e9f3..bf182f78e 100644
--- a/test/unit/sync/test_sync.py
+++ b/test/unit/sync/test_sync.py
@@ -11,8 +11,7 @@
 from enum import Enum
 from functools import partial
 
-from deps_exception import CommandError, DestFileNewer, InvalidArgument
-from deps import CompareVersionMode, KeepOrDeleteMode, NewerFileSyncMode
+from apiver_deps_exception import CommandError, DestFileNewer, InvalidArgument
 
 from .fixtures import *
 
diff --git a/test/unit/v0/apiver/__init__.py b/test/unit/v0/apiver/__init__.py
new file mode 100644
index 000000000..f213f55a6
--- /dev/null
+++ b/test/unit/v0/apiver/__init__.py
@@ -0,0 +1,12 @@
+######################################################################
+#
+# File: test/unit/v0/apiver/__init__.py
+#
+# Copyright 2020 Backblaze Inc. All Rights Reserved.
+#
+# License https://www.backblaze.com/using_b2_code.html
+#
+######################################################################
+
+# configured by pytest using `--api` option
+# check test/unit/conftest.py:pytest_configure for details
diff --git a/test/unit/v0/apiver/apiver_deps.py b/test/unit/v0/apiver/apiver_deps.py
new file mode 100644
index 000000000..c9ff0c6c4
--- /dev/null
+++ b/test/unit/v0/apiver/apiver_deps.py
@@ -0,0 +1,11 @@
+######################################################################
+#
+# File: test/unit/v0/apiver/apiver_deps.py
+#
+# Copyright 2020 Backblaze Inc. All Rights Reserved.
+#
+# License https://www.backblaze.com/using_b2_code.html
+#
+######################################################################
+
+from b2sdk.v0 import *
diff --git a/test/unit/v0/apiver/apiver_deps_exception.py b/test/unit/v0/apiver/apiver_deps_exception.py
new file mode 100644
index 000000000..74614a306
--- /dev/null
+++ b/test/unit/v0/apiver/apiver_deps_exception.py
@@ -0,0 +1,11 @@
+######################################################################
+#
+# File: test/unit/v0/apiver/apiver_deps_exception.py
+#
+# Copyright 2020 Backblaze Inc. All Rights Reserved.
+#
+# License https://www.backblaze.com/using_b2_code.html
+#
+######################################################################
+
+from b2sdk.v0.exception import *
diff --git a/test/unit/v0/deps.py b/test/unit/v0/deps.py
index 051e469e8..5d36cb216 100644
--- a/test/unit/v0/deps.py
+++ b/test/unit/v0/deps.py
@@ -8,4 +8,9 @@
 #
 ######################################################################
 
-from b2sdk.v0 import *
+# TODO: This module is used in old-style unit tests, written separately for v0 and v1.
+#  It will be removed when all tests are rewritten for the new style, like e.g. test/unit/sync/.
+
+# configured by pytest using `--api` option
+# check test/unit/conftest.py:pytest_configure for details
+from apiver_deps import *
diff --git a/test/unit/v0/deps_exception.py b/test/unit/v0/deps_exception.py
index a11f71b29..4d60a21cf 100644
--- a/test/unit/v0/deps_exception.py
+++ b/test/unit/v0/deps_exception.py
@@ -8,4 +8,9 @@
 #
 ######################################################################
 
-from b2sdk.v0.exception import *
+# TODO: This module is used in old-style unit tests, written separately for v0 and v1.
+#  It will be removed when all tests are rewritten for the new style, like e.g. test/unit/sync/.
+
+# configured by pytest using `--api` option
+# check test/unit/conftest.py:pytest_configure for details
+from apiver_deps_exception import *
diff --git a/test/unit/v1/apiver/__init__.py b/test/unit/v1/apiver/__init__.py
new file mode 100644
index 000000000..2d3ee8086
--- /dev/null
+++ b/test/unit/v1/apiver/__init__.py
@@ -0,0 +1,12 @@
+######################################################################
+#
+# File: test/unit/v1/apiver/__init__.py
+#
+# Copyright 2020 Backblaze Inc. All Rights Reserved.
+#
+# License https://www.backblaze.com/using_b2_code.html
+#
+######################################################################
+
+# configured by pytest using `--api` option
+# check test/unit/conftest.py:pytest_configure for details
diff --git a/test/unit/v1/apiver/apiver_deps.py b/test/unit/v1/apiver/apiver_deps.py
new file mode 100644
index 000000000..05d683ece
--- /dev/null
+++ b/test/unit/v1/apiver/apiver_deps.py
@@ -0,0 +1,11 @@
+######################################################################
+#
+# File: test/unit/v1/apiver/apiver_deps.py
+#
+# Copyright 2020 Backblaze Inc. All Rights Reserved.
+#
+# License https://www.backblaze.com/using_b2_code.html
+#
+######################################################################
+
+from b2sdk.v1 import *
diff --git a/test/unit/v1/apiver/apiver_deps_exception.py b/test/unit/v1/apiver/apiver_deps_exception.py
new file mode 100644
index 000000000..ea1115e1c
--- /dev/null
+++ b/test/unit/v1/apiver/apiver_deps_exception.py
@@ -0,0 +1,11 @@
+######################################################################
+#
+# File: test/unit/v1/apiver/apiver_deps_exception.py
+#
+# Copyright 2020 Backblaze Inc. All Rights Reserved.
+#
+# License https://www.backblaze.com/using_b2_code.html
+#
+######################################################################
+
+from b2sdk.v1.exception import *
diff --git a/test/unit/v1/deps.py b/test/unit/v1/deps.py
index b5d424b94..42e7e1916 100644
--- a/test/unit/v1/deps.py
+++ b/test/unit/v1/deps.py
@@ -8,4 +8,9 @@
 #
 ######################################################################
 
-from b2sdk.v1 import *
+# TODO: This module is used in old-style unit tests, written separately for v0 and v1.
+#  It will be removed when all tests are rewritten for the new style, like e.g. test/unit/sync/.
+
+# configured by pytest using `--api` option
+# check test/unit/conftest.py:pytest_configure for details
+from apiver_deps import *
diff --git a/test/unit/v1/deps_exception.py b/test/unit/v1/deps_exception.py
index af5a4d578..aa8a2a650 100644
--- a/test/unit/v1/deps_exception.py
+++ b/test/unit/v1/deps_exception.py
@@ -8,4 +8,9 @@
 #
 ######################################################################
 
-from b2sdk.v1.exception import *
+# TODO: This module is used in old-style unit tests, written separately for v0 and v1.
+#  It will be removed when all tests are rewritten for the new style, like e.g. test/unit/sync/.
+
+# configured by pytest using `--api` option
+# check test/unit/conftest.py:pytest_configure for details
+from apiver_deps_exception import *

From e196a02c31e13e41e4776ff0bc3807b7506b8d8c Mon Sep 17 00:00:00 2001
From: Maciej Lech <maciej.lech@reef.pl>
Date: Thu, 17 Sep 2020 12:55:33 +0200
Subject: [PATCH 08/15] Fix reporter for copy, few fixes

---
 b2sdk/bucket.py                         |  1 +
 b2sdk/sync/action.py                    | 20 ++++++--
 b2sdk/sync/folder.py                    |  5 +-
 b2sdk/sync/report.py                    | 56 +++++++++++++++-------
 b2sdk/sync/sync.py                      | 57 +++++++++++-----------
 b2sdk/transfer/outbound/copy_manager.py | 63 +++++++++++++------------
 noxfile.py                              |  2 +-
 7 files changed, 123 insertions(+), 81 deletions(-)

diff --git a/b2sdk/bucket.py b/b2sdk/bucket.py
index d322dc96d..4530b4a34 100644
--- a/b2sdk/bucket.py
+++ b/b2sdk/bucket.py
@@ -690,6 +690,7 @@ def copy(
         if not length:
             # TODO: it feels like this should be checked on lower level - eg. RawApi
             validate_b2_file_name(new_file_name)
+            progress_listener = progress_listener or DoNothingProgressListener()
             return self.api.services.copy_manager.copy_file(
                 copy_source,
                 new_file_name,
diff --git a/b2sdk/sync/action.py b/b2sdk/sync/action.py
index ffc7acdb4..bacbe01f5 100644
--- a/b2sdk/sync/action.py
+++ b/b2sdk/sync/action.py
@@ -119,11 +119,15 @@ def do_action(self, bucket, reporter):
         :type bucket: b2sdk.bucket.Bucket
         :param reporter: a place to report errors
         """
+        if reporter:
+            progress_listener = SyncFileReporter(reporter)
+        else:
+            progress_listener = None
         bucket.upload(
             UploadSourceLocalFile(self.local_full_path),
             self.b2_file_name,
             file_info={SRC_LAST_MODIFIED_MILLIS: str(self.mod_time_millis)},
-            progress_listener=SyncFileReporter(reporter)
+            progress_listener=progress_listener
         )
 
     def do_report(self, bucket, reporter):
@@ -238,10 +242,15 @@ def do_action(self, bucket, reporter):
         if not os.path.isdir(parent_dir):
             raise Exception('could not create directory %s' % (parent_dir,))
 
+        if reporter:
+            progress_listener = SyncFileReporter(reporter)
+        else:
+            progress_listener = None
+
         # Download the file to a .tmp file
         download_path = self.local_full_path + '.b2.sync.tmp'
         download_dest = DownloadDestLocalFile(download_path)
-        bucket.download_file_by_id(self.file_id, download_dest, SyncFileReporter(reporter))
+        bucket.download_file_by_id(self.file_id, download_dest, progress_listener)
 
         # Move the file into place
         try:
@@ -312,11 +321,16 @@ def do_action(self, bucket, reporter):
         :type bucket: b2sdk.bucket.Bucket
         :param reporter: a place to report errors
         """
+        if reporter:
+            progress_listener = SyncFileReporter(reporter)
+        else:
+            progress_listener = None
+
         bucket.copy(
             self.file_id,
             self.dest_b2_file_name,
             length=self.size,
-            progress_listener=SyncFileReporter(reporter)
+            progress_listener=progress_listener
         )
 
     def do_report(self, bucket, reporter):
diff --git a/b2sdk/sync/folder.py b/b2sdk/sync/folder.py
index 714ba0414..a7b53ba39 100644
--- a/b2sdk/sync/folder.py
+++ b/b2sdk/sync/folder.py
@@ -166,7 +166,7 @@ def ensure_present(self):
         if not os.path.exists(self.root):
             try:
                 os.mkdir(self.root)
-            except:
+            except OSError:
                 raise Exception('unable to create directory %s' % (self.root,))
         elif not os.path.isdir(self.root):
             raise Exception('%s is not a directory' % (self.root,))
@@ -232,7 +232,8 @@ def _walk_relative_paths(cls, local_dir, b2_dir, reporter, policies_manager):
                 continue
 
             if policies_manager.exclude_all_symlinks and os.path.islink(local_path):
-                reporter.symlink_skipped(local_path)
+                if reporter is not None:
+                    reporter.symlink_skipped(local_path)
                 continue
 
             if os.path.isdir(local_path):
diff --git a/b2sdk/sync/report.py b/b2sdk/sync/report.py
index 28947521c..deec9199d 100644
--- a/b2sdk/sync/report.py
+++ b/b2sdk/sync/report.py
@@ -18,7 +18,7 @@
 logger = logging.getLogger(__name__)
 
 
-class SyncReport(object):
+class SyncReport:
     """
     Handle reporting progress for syncing.
 
@@ -45,8 +45,8 @@ def __init__(self, stdout, no_progress):
         self.stdout = stdout
         self.no_progress = no_progress
         self.start_time = time.time()
-        self.local_file_count = 0
-        self.local_done = False
+        self.total_count = 0
+        self.total_done = False
         self.compare_done = False
         self.compare_count = 0
         self.total_transfer_files = 0  # set in end_compare()
@@ -109,9 +109,9 @@ def _update_progress(self):
                 self._last_update_time = now
                 time_delta = time.time() - self.start_time
                 rate = 0 if time_delta == 0 else int(self.transfer_bytes / time_delta)
-                if not self.local_done:
+                if not self.total_done:
                     message = ' count: %d files   compare: %d files   updated: %d files   %s   %s' % (
-                        self.local_file_count,
+                        self.total_count,
                         self.compare_count,
                         self.transfer_files,
                         format_and_scale_number(self.transfer_bytes, 'B'),
@@ -120,15 +120,13 @@ def _update_progress(self):
                 elif not self.compare_done:
                     message = ' compare: %d/%d files   updated: %d files   %s   %s' % (
                         self.compare_count,
-                        self.local_file_count,
+                        self.total_count,
                         self.transfer_files,
                         format_and_scale_number(self.transfer_bytes, 'B'),
                         format_and_scale_number(rate, 'B/s')
                     )  # yapf: disable
                 else:
-                    message = ' compare: %d/%d files   updated: %d/%d files   %s   %s' % (
-                        self.compare_count,
-                        self.local_file_count,
+                    message = ' updated: %d/%d files   %s   %s' % (
                         self.transfer_files,
                         self.total_transfer_files,
                         format_and_scale_fraction(self.transfer_bytes, self.total_transfer_bytes, 'B'),
@@ -169,23 +167,23 @@ def _print_line(self, line, newline):
             self.current_line = line
         self.stdout.flush()
 
-    def update_local(self, delta):
+    def update_total(self, delta):
         """
-        Report that more local files have been found.
+        Report that more files have been found for comparison.
 
         :param delta: number of files found since the last check
         :type delta: int
         """
         with self.lock:
-            self.local_file_count += delta
+            self.total_count += delta
             self._update_progress()
 
-    def end_local(self):
+    def end_total(self):
         """
-        Local file count is done.  Can proceed to step 2.
+        Total file count is done. Can proceed to step 2.
         """
         with self.lock:
-            self.local_done = True
+            self.total_done = True
             self._update_progress()
 
     def update_compare(self, delta):
@@ -251,6 +249,30 @@ def local_permission_error(self, path):
     def symlink_skipped(self, path):
         pass
 
+    @property
+    def local_file_count(self):
+        # TODO: Deprecated. Should be removed in v2
+        return self.total_count
+
+    @local_file_count.setter
+    def local_file_count(self, value):
+        # TODO: Deprecated. Should be removed in v2
+        self.total_count = value
+
+    @property
+    def local_done(self):
+        # TODO: Deprecated. Should be removed in v2
+        return self.total_done
+
+    @local_done.setter
+    def local_done(self, value):
+        # TODO: Deprecated. Should be removed in v2
+        self.total_done = value
+
+    # TODO: Deprecated. Should be removed in v2
+    update_local = update_total
+    end_local = end_total
+
 
 class SyncFileReporter(AbstractProgressListener):
     """
@@ -300,13 +322,13 @@ def sample_sync_report_run():
     sync_report = SyncReport(sys.stdout, False)
 
     for i in range(20):
-        sync_report.update_local(1)
+        sync_report.update_total(1)
         time.sleep(0.2)
         if i == 10:
             sync_report.print_completion('transferred: a.txt')
         if i % 2 == 0:
             sync_report.update_compare(1)
-    sync_report.end_local()
+    sync_report.end_total()
 
     for i in range(10):
         sync_report.update_compare(1)
diff --git a/b2sdk/sync/sync.py b/b2sdk/sync/sync.py
index f3b9a8559..83c72fb09 100644
--- a/b2sdk/sync/sync.py
+++ b/b2sdk/sync/sync.py
@@ -73,7 +73,7 @@ def zip_folders(folder_a, folder_b, reporter, policies_manager=DEFAULT_SCAN_MANA
             current_b = next_or_none(iter_b)
 
 
-def count_files(local_folder, reporter):
+def count_files(local_folder, reporter, policies_manager):
     """
     Count all of the files in a local folder.
 
@@ -82,9 +82,9 @@ def count_files(local_folder, reporter):
     """
     # Don't pass in a reporter to all_files.  Broken symlinks will be reported
     # during the next pass when the source and dest files are compared.
-    for _ in local_folder.all_files(None):
-        reporter.update_local(1)
-    reporter.end_local()
+    for _ in local_folder.all_files(None, policies_manager=policies_manager):
+        reporter.update_total(1)
+    reporter.end_total()
 
 
 @unique
@@ -195,34 +195,24 @@ def sync_folders(self, source_folder, dest_folder, now_millis, reporter):
         queue_limit = self.max_workers + 1000
         sync_executor = BoundedQueueExecutor(unbounded_executor, queue_limit=queue_limit)
 
-        # First, start the thread that counts the local files. That's the operation
-        # that should be fastest, and it provides scale for the progress reporting.
-        local_folder = None
-        if source_type == 'local':
-            local_folder = source_folder
-        if dest_type == 'local':
-            local_folder = dest_folder
-        if reporter and local_folder is not None:
-            sync_executor.submit(count_files, local_folder, reporter)
+        if source_type == 'local' and reporter is not None:
+            # Start the thread that counts the local files. That's the operation
+            # that should be fastest, and it provides scale for the progress reporting.
+            sync_executor.submit(count_files, source_folder, reporter, self.policies_manager)
 
         # Schedule each of the actions
         bucket = None
-        if source_type == 'b2':
-            bucket = source_folder.bucket
         if dest_type == 'b2':
             bucket = dest_folder.bucket
+        elif source_type == 'b2':
+            bucket = source_folder.bucket
 
-        total_files = 0
-        total_bytes = 0
         for action in self.make_folder_sync_actions(
             source_folder, dest_folder, now_millis, reporter, self.policies_manager
         ):
             logging.debug('scheduling action %s on bucket %s', action, bucket)
             sync_executor.submit(action.run, bucket, reporter, self.dry_run)
-            total_files += 1
-            total_bytes += action.get_bytes()
-        if reporter:
-            reporter.end_compare(total_files, total_bytes)
+
         # Wait for everything to finish
         sync_executor.shutdown()
         if sync_executor.get_num_exceptions() != 0:
@@ -256,6 +246,8 @@ def make_folder_sync_actions(
         if source_type != 'b2' and dest_type != 'b2':
             raise NotImplementedError('Sync between two local folders is not supported!')
 
+        total_files = 0
+        total_bytes = 0
         for source_file, dest_file in zip_folders(
             source_folder,
             dest_folder,
@@ -267,13 +259,14 @@ def make_folder_sync_actions(
             elif dest_file is None:
                 logger.debug('determined that %s is not present on destination', source_file)
 
-            if source_type == 'local':
-                if source_file is not None:
-                    reporter.update_compare(1)
-            else:
-                if dest_file is not None:
-                    reporter.update_compare(1)
+            if source_file is not None:
+                if source_type == 'b2':
+                    # For buckets we don't want to count files separately as it would require
+                    # more API calls. Instead, we count them when comparing.
+                    reporter.update_total(1)
+                reporter.update_compare(1)
 
+            import time
             for action in self.make_file_sync_actions(
                 sync_type,
                 source_file,
@@ -282,7 +275,17 @@ def make_folder_sync_actions(
                 dest_folder,
                 now_millis,
             ):
+                total_files += 1
+                total_bytes += action.get_bytes()
                 yield action
+                time.sleep(.02)
+
+        if reporter is not None:
+            if source_type == 'b2':
+                # For buckets we don't want to count files separately as it would require
+                # more API calls. Instead, we count them when comparing.
+                reporter.end_total()
+            reporter.end_compare(total_files, total_bytes)
 
     def make_file_sync_actions(
         self,
diff --git a/b2sdk/transfer/outbound/copy_manager.py b/b2sdk/transfer/outbound/copy_manager.py
index 4d91c5a0c..fa39edf1c 100644
--- a/b2sdk/transfer/outbound/copy_manager.py
+++ b/b2sdk/transfer/outbound/copy_manager.py
@@ -65,10 +65,10 @@ def copy_file(
         self,
         copy_source,
         file_name,
-        content_type=None,
-        file_info=None,
-        destination_bucket_id=None,
-        progress_listener=None,
+        content_type,
+        file_info,
+        destination_bucket_id,
+        progress_listener,
     ):
         # Run small copies in the same thread pool as large file copies,
         # so that they share resources during a sync.
@@ -146,35 +146,36 @@ def _copy_small_file(
         self,
         copy_source,
         file_name,
-        content_type=None,
-        file_info=None,
-        destination_bucket_id=None,
-        progress_listener=None,
+        content_type,
+        file_info,
+        destination_bucket_id,
+        progress_listener,
     ):
-        if progress_listener is not None:
+        with progress_listener:
             progress_listener.set_total_bytes(copy_source.get_content_length() or 0)
 
-        bytes_range = copy_source.get_bytes_range()
+            bytes_range = copy_source.get_bytes_range()
+
+            if content_type is None:
+                if file_info is not None:
+                    raise ValueError('File info can be set only when content type is set')
+                metadata_directive = MetadataDirectiveMode.COPY
+            else:
+                if file_info is None:
+                    raise ValueError('File info can be not set only when content type is not set')
+                metadata_directive = MetadataDirectiveMode.REPLACE
+
+            response = self.services.session.copy_file(
+                copy_source.file_id,
+                file_name,
+                bytes_range=bytes_range,
+                metadata_directive=metadata_directive,
+                content_type=content_type,
+                file_info=file_info,
+                destination_bucket_id=destination_bucket_id
+            )
+            file_info = FileVersionInfoFactory.from_api_response(response)
+            if progress_listener is not None:
+                progress_listener.bytes_completed(file_info.size)
 
-        if content_type is None:
-            if file_info is not None:
-                raise ValueError('File info can be set only when content type is set')
-            metadata_directive = MetadataDirectiveMode.COPY
-        else:
-            if file_info is None:
-                raise ValueError('File info can be not set only when content type is not set')
-            metadata_directive = MetadataDirectiveMode.REPLACE
-
-        response = self.services.session.copy_file(
-            copy_source.file_id,
-            file_name,
-            bytes_range=bytes_range,
-            metadata_directive=metadata_directive,
-            content_type=content_type,
-            file_info=file_info,
-            destination_bucket_id=destination_bucket_id
-        )
-        file_info = FileVersionInfoFactory.from_api_response(response)
-        if progress_listener is not None:
-            progress_listener.bytes_completed(file_info.size)
         return file_info
diff --git a/noxfile.py b/noxfile.py
index 37fcdf210..da7e80456 100644
--- a/noxfile.py
+++ b/noxfile.py
@@ -134,7 +134,7 @@ def test(session):
 def cover(session):
     """Perform coverage analysis."""
     session.install('coverage')
-    session.run('coverage', 'report', '--fail-under=75', '--show-missing')
+    session.run('coverage', 'report', '--fail-under=75', '--show-missing', '--skip-covered')
     session.run('coverage', 'erase')
 
 

From c1b51f9d2e12898124f969e29d6a3b2ff4cb58ab Mon Sep 17 00:00:00 2001
From: Maciej Lech <maciej.lech@reef.pl>
Date: Thu, 17 Sep 2020 13:00:22 +0200
Subject: [PATCH 09/15] Fix unit tests for windows

---
 test/unit/conftest.py | 6 +++---
 1 file changed, 3 insertions(+), 3 deletions(-)

diff --git a/test/unit/conftest.py b/test/unit/conftest.py
index b2326f5a2..25e994ca1 100644
--- a/test/unit/conftest.py
+++ b/test/unit/conftest.py
@@ -7,7 +7,7 @@
 # License https://www.backblaze.com/using_b2_code.html
 #
 ######################################################################
-
+import os
 import sys
 from pathlib import Path
 
@@ -40,9 +40,9 @@ def pytest_report_header(config):
 def pytest_ignore_collect(path, config):
     path = str(path)
     ver = config.getoption('--api')
-    if ver == 'v1' and 'v0/' in path:
+    if ver == 'v1' and 'v0' + os.sep in path:
         return True
-    if ver == 'v0' and 'v1/' in path:
+    if ver == 'v0' and 'v1' + os.sep in path:
         return True
     return False
 

From 2f0f77f9206cbbe598b941751835638a5300d560 Mon Sep 17 00:00:00 2001
From: Maciej Lech <maciej.lech@reef.pl>
Date: Thu, 17 Sep 2020 14:57:22 +0200
Subject: [PATCH 10/15] Fix usage of progress listener

---
 b2sdk/download_dest.py                    |  3 +-
 b2sdk/sync/action.py                      | 39 ++++++++-------
 b2sdk/sync/sync.py                        |  2 -
 b2sdk/transfer/emerge/executor.py         | 41 ++++++++--------
 b2sdk/transfer/outbound/copy_manager.py   | 50 ++++++++++---------
 b2sdk/transfer/outbound/upload_manager.py | 56 +++++++++++-----------
 test/unit/v0/test_bucket.py               | 50 ++++++++++---------
 test/unit/v0/test_download_dest.py        | 12 ++---
 test/unit/v1/test_bucket.py               | 58 +++++++++++++----------
 test/unit/v1/test_download_dest.py        | 12 ++---
 10 files changed, 166 insertions(+), 157 deletions(-)

diff --git a/b2sdk/download_dest.py b/b2sdk/download_dest.py
index 35d69ed91..1bf6f0ef1 100644
--- a/b2sdk/download_dest.py
+++ b/b2sdk/download_dest.py
@@ -219,5 +219,4 @@ def write_file_and_report_progress_context(
             if range_ is not None:
                 total_bytes = range_[1] - range_[0] + 1
             self.progress_listener.set_total_bytes(total_bytes)
-            with self.progress_listener:
-                yield WritingStreamWithProgress(file_, self.progress_listener)
+            yield WritingStreamWithProgress(file_, self.progress_listener)
diff --git a/b2sdk/sync/action.py b/b2sdk/sync/action.py
index bacbe01f5..6caa8aae9 100644
--- a/b2sdk/sync/action.py
+++ b/b2sdk/sync/action.py
@@ -13,6 +13,7 @@
 import logging
 import os
 from ..download_dest import DownloadDestLocalFile
+from ..progress import DoNothingProgressListener
 from ..raw_api import SRC_LAST_MODIFIED_MILLIS
 from ..transfer.outbound.upload_source import UploadSourceLocalFile
 from .report import SyncFileReporter
@@ -122,13 +123,15 @@ def do_action(self, bucket, reporter):
         if reporter:
             progress_listener = SyncFileReporter(reporter)
         else:
-            progress_listener = None
-        bucket.upload(
-            UploadSourceLocalFile(self.local_full_path),
-            self.b2_file_name,
-            file_info={SRC_LAST_MODIFIED_MILLIS: str(self.mod_time_millis)},
-            progress_listener=progress_listener
-        )
+            progress_listener = DoNothingProgressListener()
+
+        with progress_listener:
+            bucket.upload(
+                UploadSourceLocalFile(self.local_full_path),
+                self.b2_file_name,
+                file_info={SRC_LAST_MODIFIED_MILLIS: str(self.mod_time_millis)},
+                progress_listener=progress_listener
+            )
 
     def do_report(self, bucket, reporter):
         """
@@ -245,12 +248,13 @@ def do_action(self, bucket, reporter):
         if reporter:
             progress_listener = SyncFileReporter(reporter)
         else:
-            progress_listener = None
+            progress_listener = DoNothingProgressListener()
 
         # Download the file to a .tmp file
         download_path = self.local_full_path + '.b2.sync.tmp'
         download_dest = DownloadDestLocalFile(download_path)
-        bucket.download_file_by_id(self.file_id, download_dest, progress_listener)
+        with progress_listener:
+            bucket.download_file_by_id(self.file_id, download_dest, progress_listener)
 
         # Move the file into place
         try:
@@ -324,14 +328,15 @@ def do_action(self, bucket, reporter):
         if reporter:
             progress_listener = SyncFileReporter(reporter)
         else:
-            progress_listener = None
-
-        bucket.copy(
-            self.file_id,
-            self.dest_b2_file_name,
-            length=self.size,
-            progress_listener=progress_listener
-        )
+            progress_listener = DoNothingProgressListener()
+
+        with progress_listener:
+            bucket.copy(
+                self.file_id,
+                self.dest_b2_file_name,
+                length=self.size,
+                progress_listener=progress_listener
+            )
 
     def do_report(self, bucket, reporter):
         """
diff --git a/b2sdk/sync/sync.py b/b2sdk/sync/sync.py
index 83c72fb09..daa6ee70e 100644
--- a/b2sdk/sync/sync.py
+++ b/b2sdk/sync/sync.py
@@ -266,7 +266,6 @@ def make_folder_sync_actions(
                     reporter.update_total(1)
                 reporter.update_compare(1)
 
-            import time
             for action in self.make_file_sync_actions(
                 sync_type,
                 source_file,
@@ -278,7 +277,6 @@ def make_folder_sync_actions(
                 total_files += 1
                 total_bytes += action.get_bytes()
                 yield action
-                time.sleep(.02)
 
         if reporter is not None:
             if source_type == 'b2':
diff --git a/b2sdk/transfer/emerge/executor.py b/b2sdk/transfer/emerge/executor.py
index 600431069..b351706b5 100644
--- a/b2sdk/transfer/emerge/executor.py
+++ b/b2sdk/transfer/emerge/executor.py
@@ -147,27 +147,26 @@ def execute_plan(self, emerge_plan):
             )
         file_id = unfinished_file.file_id
 
-        with self.progress_listener:
-            large_file_upload_state = LargeFileUploadState(self.progress_listener)
-
-            part_futures = []
-            for part_number, emerge_part in emerge_plan.enumerate_emerge_parts():
-                execution_step_factory = LargeFileEmergeExecutionStepFactory(
-                    self,
-                    emerge_part,
-                    part_number,
-                    file_id,
-                    large_file_upload_state,
-                    finished_parts=finished_parts,
-                )
-                execution_step = execution_step_factory.get_execution_step()
-                future = self._execute_step(execution_step)
-                part_futures.append(future)
-
-            # Collect the sha1 checksums of the parts as the uploads finish.
-            # If any of them raised an exception, that same exception will
-            # be raised here by result()
-            part_sha1_array = [interruptible_get_result(f)['contentSha1'] for f in part_futures]
+        large_file_upload_state = LargeFileUploadState(self.progress_listener)
+
+        part_futures = []
+        for part_number, emerge_part in emerge_plan.enumerate_emerge_parts():
+            execution_step_factory = LargeFileEmergeExecutionStepFactory(
+                self,
+                emerge_part,
+                part_number,
+                file_id,
+                large_file_upload_state,
+                finished_parts=finished_parts,
+            )
+            execution_step = execution_step_factory.get_execution_step()
+            future = self._execute_step(execution_step)
+            part_futures.append(future)
+
+        # Collect the sha1 checksums of the parts as the uploads finish.
+        # If any of them raised an exception, that same exception will
+        # be raised here by result()
+        part_sha1_array = [interruptible_get_result(f)['contentSha1'] for f in part_futures]
 
         # Finish the large file
         response = self.services.session.finish_large_file(file_id, part_sha1_array)
diff --git a/b2sdk/transfer/outbound/copy_manager.py b/b2sdk/transfer/outbound/copy_manager.py
index fa39edf1c..c92162b64 100644
--- a/b2sdk/transfer/outbound/copy_manager.py
+++ b/b2sdk/transfer/outbound/copy_manager.py
@@ -151,31 +151,29 @@ def _copy_small_file(
         destination_bucket_id,
         progress_listener,
     ):
-        with progress_listener:
-            progress_listener.set_total_bytes(copy_source.get_content_length() or 0)
-
-            bytes_range = copy_source.get_bytes_range()
-
-            if content_type is None:
-                if file_info is not None:
-                    raise ValueError('File info can be set only when content type is set')
-                metadata_directive = MetadataDirectiveMode.COPY
-            else:
-                if file_info is None:
-                    raise ValueError('File info can be not set only when content type is not set')
-                metadata_directive = MetadataDirectiveMode.REPLACE
-
-            response = self.services.session.copy_file(
-                copy_source.file_id,
-                file_name,
-                bytes_range=bytes_range,
-                metadata_directive=metadata_directive,
-                content_type=content_type,
-                file_info=file_info,
-                destination_bucket_id=destination_bucket_id
-            )
-            file_info = FileVersionInfoFactory.from_api_response(response)
-            if progress_listener is not None:
-                progress_listener.bytes_completed(file_info.size)
+        progress_listener.set_total_bytes(copy_source.get_content_length() or 0)
+
+        bytes_range = copy_source.get_bytes_range()
+
+        if content_type is None:
+            if file_info is not None:
+                raise ValueError('File info can be set only when content type is set')
+            metadata_directive = MetadataDirectiveMode.COPY
+        else:
+            if file_info is None:
+                raise ValueError('File info can be not set only when content type is not set')
+            metadata_directive = MetadataDirectiveMode.REPLACE
+
+        response = self.services.session.copy_file(
+            copy_source.file_id,
+            file_name,
+            bytes_range=bytes_range,
+            metadata_directive=metadata_directive,
+            content_type=content_type,
+            file_info=file_info,
+            destination_bucket_id=destination_bucket_id
+        )
+        file_info = FileVersionInfoFactory.from_api_response(response)
+        progress_listener.bytes_completed(file_info.size)
 
         return file_info
diff --git a/b2sdk/transfer/outbound/upload_manager.py b/b2sdk/transfer/outbound/upload_manager.py
index 535c171d8..0c9017765 100644
--- a/b2sdk/transfer/outbound/upload_manager.py
+++ b/b2sdk/transfer/outbound/upload_manager.py
@@ -184,34 +184,32 @@ def _upload_small_file(
         content_length = upload_source.get_content_length()
         exception_info_list = []
         progress_listener.set_total_bytes(content_length)
-        with progress_listener:
-            for _ in range(self.MAX_UPLOAD_ATTEMPTS):
-                try:
-                    with upload_source.open() as file:
-                        input_stream = ReadingStreamWithProgress(
-                            file, progress_listener, length=content_length
-                        )
-                        if upload_source.is_sha1_known():
-                            content_sha1 = upload_source.get_content_sha1()
-                        else:
-                            input_stream = StreamWithHash(
-                                input_stream, stream_length=content_length
-                            )
-                            content_sha1 = HEX_DIGITS_AT_END
-                        # it is important that `len()` works on `input_stream`
-                        response = self.services.session.upload_file(
-                            bucket_id, file_name, len(input_stream), content_type, content_sha1,
-                            file_info, input_stream
-                        )
-                        if content_sha1 == HEX_DIGITS_AT_END:
-                            content_sha1 = input_stream.hash
-                        assert content_sha1 == response['contentSha1']
-                        return FileVersionInfoFactory.from_api_response(response)
-
-                except B2Error as e:
-                    if not e.should_retry_upload():
-                        raise
-                    exception_info_list.append(e)
-                    self.account_info.clear_bucket_upload_data(bucket_id)
+
+        for _ in range(self.MAX_UPLOAD_ATTEMPTS):
+            try:
+                with upload_source.open() as file:
+                    input_stream = ReadingStreamWithProgress(
+                        file, progress_listener, length=content_length
+                    )
+                    if upload_source.is_sha1_known():
+                        content_sha1 = upload_source.get_content_sha1()
+                    else:
+                        input_stream = StreamWithHash(input_stream, stream_length=content_length)
+                        content_sha1 = HEX_DIGITS_AT_END
+                    # it is important that `len()` works on `input_stream`
+                    response = self.services.session.upload_file(
+                        bucket_id, file_name, len(input_stream), content_type, content_sha1,
+                        file_info, input_stream
+                    )
+                    if content_sha1 == HEX_DIGITS_AT_END:
+                        content_sha1 = input_stream.hash
+                    assert content_sha1 == response['contentSha1']
+                    return FileVersionInfoFactory.from_api_response(response)
+
+            except B2Error as e:
+                if not e.should_retry_upload():
+                    raise
+                exception_info_list.append(e)
+                self.account_info.clear_bucket_upload_data(bucket_id)
 
         raise MaxRetriesExceeded(self.MAX_UPLOAD_ATTEMPTS, exception_info_list)
diff --git a/test/unit/v0/test_bucket.py b/test/unit/v0/test_bucket.py
index c2e12f6da..90cb486d5 100644
--- a/test/unit/v0/test_bucket.py
+++ b/test/unit/v0/test_bucket.py
@@ -424,8 +424,8 @@ def test_upload_bytes(self):
 
     def test_upload_bytes_progress(self):
         data = b'hello world'
-        progress_listener = StubProgressListener()
-        self.bucket.upload_bytes(data, 'file1', progress_listener=progress_listener)
+        with StubProgressListener() as progress_listener:
+            self.bucket.upload_bytes(data, 'file1', progress_listener=progress_listener)
         self.assertTrue(progress_listener.is_valid())
 
     def test_upload_local_file(self):
@@ -475,8 +475,8 @@ def test_upload_file_too_many_retryable_errors(self):
 
     def test_upload_large(self):
         data = self._make_data(self.simulator.MIN_PART_SIZE * 3)
-        progress_listener = StubProgressListener()
-        self.bucket.upload_bytes(data, 'file1', progress_listener=progress_listener)
+        with StubProgressListener() as progress_listener:
+            self.bucket.upload_bytes(data, 'file1', progress_listener=progress_listener)
         self._check_file_contents('file1', data)
         self.assertTrue(progress_listener.is_valid())
 
@@ -485,8 +485,8 @@ def test_upload_large_resume(self):
         data = self._make_data(part_size * 3)
         large_file_id = self._start_large_file('file1')
         self._upload_part(large_file_id, 1, data[:part_size])
-        progress_listener = StubProgressListener()
-        file_info = self.bucket.upload_bytes(data, 'file1', progress_listener=progress_listener)
+        with StubProgressListener() as progress_listener:
+            file_info = self.bucket.upload_bytes(data, 'file1', progress_listener=progress_listener)
         self.assertEqual(large_file_id, file_info.id_)
         self._check_file_contents('file1', data)
         self.assertTrue(progress_listener.is_valid())
@@ -495,8 +495,8 @@ def test_upload_large_resume_no_parts(self):
         part_size = self.simulator.MIN_PART_SIZE
         data = self._make_data(part_size * 3)
         large_file_id = self._start_large_file('file1')
-        progress_listener = StubProgressListener()
-        file_info = self.bucket.upload_bytes(data, 'file1', progress_listener=progress_listener)
+        with StubProgressListener() as progress_listener:
+            file_info = self.bucket.upload_bytes(data, 'file1', progress_listener=progress_listener)
         self.assertNotEqual(large_file_id, file_info.id_)  # it's not a match if there are no parts
         self._check_file_contents('file1', data)
         self.assertTrue(progress_listener.is_valid())
@@ -508,8 +508,8 @@ def test_upload_large_resume_all_parts_there(self):
         self._upload_part(large_file_id, 1, data[:part_size])
         self._upload_part(large_file_id, 2, data[part_size:2 * part_size])
         self._upload_part(large_file_id, 3, data[2 * part_size:])
-        progress_listener = StubProgressListener()
-        file_info = self.bucket.upload_bytes(data, 'file1', progress_listener=progress_listener)
+        with StubProgressListener() as progress_listener:
+            file_info = self.bucket.upload_bytes(data, 'file1', progress_listener=progress_listener)
         self.assertEqual(large_file_id, file_info.id_)
         self._check_file_contents('file1', data)
         self.assertTrue(progress_listener.is_valid())
@@ -519,8 +519,8 @@ def test_upload_large_resume_part_does_not_match(self):
         data = self._make_data(part_size * 3)
         large_file_id = self._start_large_file('file1')
         self._upload_part(large_file_id, 3, data[:part_size])  # wrong part number for this data
-        progress_listener = StubProgressListener()
-        file_info = self.bucket.upload_bytes(data, 'file1', progress_listener=progress_listener)
+        with StubProgressListener() as progress_listener:
+            file_info = self.bucket.upload_bytes(data, 'file1', progress_listener=progress_listener)
         self.assertNotEqual(large_file_id, file_info.id_)
         self._check_file_contents('file1', data)
         self.assertTrue(progress_listener.is_valid())
@@ -530,8 +530,8 @@ def test_upload_large_resume_wrong_part_size(self):
         data = self._make_data(part_size * 3)
         large_file_id = self._start_large_file('file1')
         self._upload_part(large_file_id, 1, data[:part_size + 1])  # one byte to much
-        progress_listener = StubProgressListener()
-        file_info = self.bucket.upload_bytes(data, 'file1', progress_listener=progress_listener)
+        with StubProgressListener() as progress_listener:
+            file_info = self.bucket.upload_bytes(data, 'file1', progress_listener=progress_listener)
         self.assertNotEqual(large_file_id, file_info.id_)
         self._check_file_contents('file1', data)
         self.assertTrue(progress_listener.is_valid())
@@ -541,10 +541,13 @@ def test_upload_large_resume_file_info(self):
         data = self._make_data(part_size * 3)
         large_file_id = self._start_large_file('file1', {'property': 'value1'})
         self._upload_part(large_file_id, 1, data[:part_size])
-        progress_listener = StubProgressListener()
-        file_info = self.bucket.upload_bytes(
-            data, 'file1', progress_listener=progress_listener, file_infos={'property': 'value1'}
-        )
+        with StubProgressListener() as progress_listener:
+            file_info = self.bucket.upload_bytes(
+                data,
+                'file1',
+                progress_listener=progress_listener,
+                file_infos={'property': 'value1'}
+            )
         self.assertEqual(large_file_id, file_info.id_)
         self._check_file_contents('file1', data)
         self.assertTrue(progress_listener.is_valid())
@@ -554,10 +557,13 @@ def test_upload_large_resume_file_info_does_not_match(self):
         data = self._make_data(part_size * 3)
         large_file_id = self._start_large_file('file1', {'property': 'value1'})
         self._upload_part(large_file_id, 1, data[:part_size])
-        progress_listener = StubProgressListener()
-        file_info = self.bucket.upload_bytes(
-            data, 'file1', progress_listener=progress_listener, file_infos={'property': 'value2'}
-        )
+        with StubProgressListener() as progress_listener:
+            file_info = self.bucket.upload_bytes(
+                data,
+                'file1',
+                progress_listener=progress_listener,
+                file_infos={'property': 'value2'}
+            )
         self.assertNotEqual(large_file_id, file_info.id_)
         self._check_file_contents('file1', data)
         self.assertTrue(progress_listener.is_valid())
diff --git a/test/unit/v0/test_download_dest.py b/test/unit/v0/test_download_dest.py
index 21726237a..9268b3831 100644
--- a/test/unit/v0/test_download_dest.py
+++ b/test/unit/v0/test_download_dest.py
@@ -75,12 +75,12 @@ def test_write_and_set_mod_time_and_progress(self):
         with TempDir() as temp_dir:
             file_path = os.path.join(temp_dir, "test.txt")
             download_local_file = DownloadDestLocalFile(file_path)
-            progress_listener = ProgressListenerForTest()
-            download_dest = DownloadDestProgressWrapper(download_local_file, progress_listener)
-            with download_dest.make_file_context(
-                "file_id", "file_name", 100, "content_type", "sha1", {}, mod_time
-            ) as f:
-                f.write(b'hello world\n')
+            with ProgressListenerForTest() as progress_listener:
+                download_dest = DownloadDestProgressWrapper(download_local_file, progress_listener)
+                with download_dest.make_file_context(
+                    "file_id", "file_name", 100, "content_type", "sha1", {}, mod_time
+                ) as f:
+                    f.write(b'hello world\n')
             with open(file_path, 'rb') as f:
                 self.assertEqual(b'hello world\n', f.read())
             self.assertEqual(mod_time, int(os.path.getmtime(file_path) * 1000))
diff --git a/test/unit/v1/test_bucket.py b/test/unit/v1/test_bucket.py
index 207439ce2..ba100ac19 100644
--- a/test/unit/v1/test_bucket.py
+++ b/test/unit/v1/test_bucket.py
@@ -510,8 +510,8 @@ def test_upload_bytes(self):
 
     def test_upload_bytes_progress(self):
         data = b'hello world'
-        progress_listener = StubProgressListener()
-        self.bucket.upload_bytes(data, 'file1', progress_listener=progress_listener)
+        with StubProgressListener() as progress_listener:
+            self.bucket.upload_bytes(data, 'file1', progress_listener=progress_listener)
         self.assertTrue(progress_listener.is_valid())
 
     def test_upload_local_file(self):
@@ -576,8 +576,8 @@ def test_upload_file_too_many_retryable_errors(self):
 
     def test_upload_large(self):
         data = self._make_data(self.simulator.MIN_PART_SIZE * 3)
-        progress_listener = StubProgressListener()
-        self.bucket.upload_bytes(data, 'file1', progress_listener=progress_listener)
+        with StubProgressListener() as progress_listener:
+            self.bucket.upload_bytes(data, 'file1', progress_listener=progress_listener)
         self._check_file_contents('file1', data)
         self.assertTrue(progress_listener.is_valid())
 
@@ -594,8 +594,8 @@ def test_upload_large_resume(self):
         data = self._make_data(part_size * 3)
         large_file_id = self._start_large_file('file1')
         self._upload_part(large_file_id, 1, data[:part_size])
-        progress_listener = StubProgressListener()
-        file_info = self.bucket.upload_bytes(data, 'file1', progress_listener=progress_listener)
+        with StubProgressListener() as progress_listener:
+            file_info = self.bucket.upload_bytes(data, 'file1', progress_listener=progress_listener)
         self.assertEqual(large_file_id, file_info.id_)
         self._check_file_contents('file1', data)
         self.assertTrue(progress_listener.is_valid())
@@ -604,8 +604,8 @@ def test_upload_large_resume_no_parts(self):
         part_size = self.simulator.MIN_PART_SIZE
         data = self._make_data(part_size * 3)
         large_file_id = self._start_large_file('file1')
-        progress_listener = StubProgressListener()
-        file_info = self.bucket.upload_bytes(data, 'file1', progress_listener=progress_listener)
+        with StubProgressListener() as progress_listener:
+            file_info = self.bucket.upload_bytes(data, 'file1', progress_listener=progress_listener)
         self.assertNotEqual(large_file_id, file_info.id_)  # it's not a match if there are no parts
         self._check_file_contents('file1', data)
         self.assertTrue(progress_listener.is_valid())
@@ -617,8 +617,8 @@ def test_upload_large_resume_all_parts_there(self):
         self._upload_part(large_file_id, 1, data[:part_size])
         self._upload_part(large_file_id, 2, data[part_size:2 * part_size])
         self._upload_part(large_file_id, 3, data[2 * part_size:])
-        progress_listener = StubProgressListener()
-        file_info = self.bucket.upload_bytes(data, 'file1', progress_listener=progress_listener)
+        with StubProgressListener() as progress_listener:
+            file_info = self.bucket.upload_bytes(data, 'file1', progress_listener=progress_listener)
         self.assertEqual(large_file_id, file_info.id_)
         self._check_file_contents('file1', data)
         self.assertTrue(progress_listener.is_valid())
@@ -628,8 +628,8 @@ def test_upload_large_resume_part_does_not_match(self):
         data = self._make_data(part_size * 3)
         large_file_id = self._start_large_file('file1')
         self._upload_part(large_file_id, 3, data[:part_size])  # wrong part number for this data
-        progress_listener = StubProgressListener()
-        file_info = self.bucket.upload_bytes(data, 'file1', progress_listener=progress_listener)
+        with StubProgressListener() as progress_listener:
+            file_info = self.bucket.upload_bytes(data, 'file1', progress_listener=progress_listener)
         self.assertNotEqual(large_file_id, file_info.id_)
         self._check_file_contents('file1', data)
         self.assertTrue(progress_listener.is_valid())
@@ -639,8 +639,8 @@ def test_upload_large_resume_wrong_part_size(self):
         data = self._make_data(part_size * 3)
         large_file_id = self._start_large_file('file1')
         self._upload_part(large_file_id, 1, data[:part_size + 1])  # one byte to much
-        progress_listener = StubProgressListener()
-        file_info = self.bucket.upload_bytes(data, 'file1', progress_listener=progress_listener)
+        with StubProgressListener() as progress_listener:
+            file_info = self.bucket.upload_bytes(data, 'file1', progress_listener=progress_listener)
         self.assertNotEqual(large_file_id, file_info.id_)
         self._check_file_contents('file1', data)
         self.assertTrue(progress_listener.is_valid())
@@ -650,10 +650,13 @@ def test_upload_large_resume_file_info(self):
         data = self._make_data(part_size * 3)
         large_file_id = self._start_large_file('file1', {'property': 'value1'})
         self._upload_part(large_file_id, 1, data[:part_size])
-        progress_listener = StubProgressListener()
-        file_info = self.bucket.upload_bytes(
-            data, 'file1', progress_listener=progress_listener, file_infos={'property': 'value1'}
-        )
+        with StubProgressListener() as progress_listener:
+            file_info = self.bucket.upload_bytes(
+                data,
+                'file1',
+                progress_listener=progress_listener,
+                file_infos={'property': 'value1'}
+            )
         self.assertEqual(large_file_id, file_info.id_)
         self._check_file_contents('file1', data)
         self.assertTrue(progress_listener.is_valid())
@@ -663,10 +666,13 @@ def test_upload_large_resume_file_info_does_not_match(self):
         data = self._make_data(part_size * 3)
         large_file_id = self._start_large_file('file1', {'property': 'value1'})
         self._upload_part(large_file_id, 1, data[:part_size])
-        progress_listener = StubProgressListener()
-        file_info = self.bucket.upload_bytes(
-            data, 'file1', progress_listener=progress_listener, file_infos={'property': 'value2'}
-        )
+        with StubProgressListener() as progress_listener:
+            file_info = self.bucket.upload_bytes(
+                data,
+                'file1',
+                progress_listener=progress_listener,
+                file_infos={'property': 'value2'}
+            )
         self.assertNotEqual(large_file_id, file_info.id_)
         self._check_file_contents('file1', data)
         self.assertTrue(progress_listener.is_valid())
@@ -675,10 +681,10 @@ def test_upload_large_file_with_restricted_api_key(self):
         self.simulator.key_id_to_key[self.account_id].name_prefix_or_none = 'path/to'
         part_size = self.simulator.MIN_PART_SIZE
         data = self._make_data(part_size * 3)
-        progress_listener = StubProgressListener()
-        file_info = self.bucket.upload_bytes(
-            data, 'path/to/file1', progress_listener=progress_listener
-        )
+        with StubProgressListener() as progress_listener:
+            file_info = self.bucket.upload_bytes(
+                data, 'path/to/file1', progress_listener=progress_listener
+            )
         self.assertEqual(len(data), file_info.size)
         self._check_file_contents('path/to/file1', data)
         self.assertTrue(progress_listener.is_valid())
diff --git a/test/unit/v1/test_download_dest.py b/test/unit/v1/test_download_dest.py
index 80cf07f79..976d525fa 100644
--- a/test/unit/v1/test_download_dest.py
+++ b/test/unit/v1/test_download_dest.py
@@ -75,12 +75,12 @@ def test_write_and_set_mod_time_and_progress(self):
         with TempDir() as temp_dir:
             file_path = os.path.join(temp_dir, "test.txt")
             download_local_file = DownloadDestLocalFile(file_path)
-            progress_listener = ProgressListenerForTest()
-            download_dest = DownloadDestProgressWrapper(download_local_file, progress_listener)
-            with download_dest.make_file_context(
-                "file_id", "file_name", 100, "content_type", "sha1", {}, mod_time
-            ) as f:
-                f.write(b'hello world\n')
+            with ProgressListenerForTest() as progress_listener:
+                download_dest = DownloadDestProgressWrapper(download_local_file, progress_listener)
+                with download_dest.make_file_context(
+                    "file_id", "file_name", 100, "content_type", "sha1", {}, mod_time
+                ) as f:
+                    f.write(b'hello world\n')
             with open(file_path, 'rb') as f:
                 self.assertEqual(b'hello world\n', f.read())
             self.assertEqual(mod_time, int(os.path.getmtime(file_path) * 1000))

From ded44992e861d198df864b1556a8008487a08515 Mon Sep 17 00:00:00 2001
From: Maciej Lech <maciej.lech@reef.pl>
Date: Thu, 1 Oct 2020 19:46:19 +0200
Subject: [PATCH 11/15] Add better API handling in conftest

---
 test/unit/conftest.py | 14 ++++++++------
 1 file changed, 8 insertions(+), 6 deletions(-)

diff --git a/test/unit/conftest.py b/test/unit/conftest.py
index 25e994ca1..b8022388c 100644
--- a/test/unit/conftest.py
+++ b/test/unit/conftest.py
@@ -15,13 +15,15 @@
 
 pytest.register_assert_rewrite('test.unit')
 
+API_VERSIONS = ['v0', 'v1']
+
 
 @pytest.hookimpl
 def pytest_addoption(parser):
     parser.addoption(
         '--api',
-        default='v1',
-        choices=['v0', 'v1'],
+        default=API_VERSIONS[-1],
+        choices=API_VERSIONS,
         help='version of the API',
     )
 
@@ -40,10 +42,10 @@ def pytest_report_header(config):
 def pytest_ignore_collect(path, config):
     path = str(path)
     ver = config.getoption('--api')
-    if ver == 'v1' and 'v0' + os.sep in path:
-        return True
-    if ver == 'v0' and 'v1' + os.sep in path:
-        return True
+    other_versions = [v for v in API_VERSIONS if v != ver]
+    for other_version in other_versions:
+        if other_version + os.sep in path:
+            return True
     return False
 
 

From 3a3da88d2444ec4eedbcee028563e8bdb7762516 Mon Sep 17 00:00:00 2001
From: Maciej Lech <maciej.lech@reef.pl>
Date: Thu, 1 Oct 2020 19:56:59 +0200
Subject: [PATCH 12/15] Change error class in sync

---
 b2sdk/sync/sync.py          | 4 ++--
 test/unit/sync/test_sync.py | 2 +-
 2 files changed, 3 insertions(+), 3 deletions(-)

diff --git a/b2sdk/sync/sync.py b/b2sdk/sync/sync.py
index daa6ee70e..fd9d706ee 100644
--- a/b2sdk/sync/sync.py
+++ b/b2sdk/sync/sync.py
@@ -175,7 +175,7 @@ def sync_folders(self, source_folder, dest_folder, now_millis, reporter):
         dest_type = dest_folder.folder_type()
 
         if source_type != 'b2' and dest_type != 'b2':
-            raise NotImplementedError('Sync between two local folders is not supported!')
+            raise ValueError('Sync between two local folders is not supported!')
 
         # For downloads, make sure that the target directory is there.
         if dest_type == 'local' and not self.dry_run:
@@ -244,7 +244,7 @@ def make_folder_sync_actions(
         dest_type = dest_folder.folder_type()
         sync_type = '%s-to-%s' % (source_type, dest_type)
         if source_type != 'b2' and dest_type != 'b2':
-            raise NotImplementedError('Sync between two local folders is not supported!')
+            raise ValueError('Sync between two local folders is not supported!')
 
         total_files = 0
         total_bytes = 0
diff --git a/test/unit/sync/test_sync.py b/test/unit/sync/test_sync.py
index bf182f78e..fc34d8e64 100644
--- a/test/unit/sync/test_sync.py
+++ b/test/unit/sync/test_sync.py
@@ -71,7 +71,7 @@ def test_illegal_args(self, synchronizer_factory, apiver, args):
             synchronizer_factory(**args)
 
     def test_illegal(self, synchronizer):
-        with pytest.raises(NotImplementedError):
+        with pytest.raises(ValueError):
             src = self.local_folder_factory()
             dst = self.local_folder_factory()
             self.assert_folder_sync_actions(synchronizer, src, dst, [])

From 9dba8b00c4b0d477f072b0ac8b82f2c4af6147fb Mon Sep 17 00:00:00 2001
From: Maciej Lech <maciej.lech@reef.pl>
Date: Sat, 10 Oct 2020 18:09:14 +0200
Subject: [PATCH 13/15] Update after PR

---
 b2sdk/sync/report.py  |  4 +++-
 b2sdk/sync/sync.py    | 14 ++++++++------
 test/unit/conftest.py | 10 +++++++++-
 3 files changed, 20 insertions(+), 8 deletions(-)

diff --git a/b2sdk/sync/report.py b/b2sdk/sync/report.py
index deec9199d..68eebec80 100644
--- a/b2sdk/sync/report.py
+++ b/b2sdk/sync/report.py
@@ -126,7 +126,9 @@ def _update_progress(self):
                         format_and_scale_number(rate, 'B/s')
                     )  # yapf: disable
                 else:
-                    message = ' updated: %d/%d files   %s   %s' % (
+                    message = ' compare: %d/%d files   updated: %d/%d files   %s   %s' % (
+                        self.compare_count,
+                        self.total_count,
                         self.transfer_files,
                         self.total_transfer_files,
                         format_and_scale_fraction(self.transfer_bytes, self.total_transfer_bytes, 'B'),
diff --git a/b2sdk/sync/sync.py b/b2sdk/sync/sync.py
index fd9d706ee..310e64ac5 100644
--- a/b2sdk/sync/sync.py
+++ b/b2sdk/sync/sync.py
@@ -200,18 +200,20 @@ def sync_folders(self, source_folder, dest_folder, now_millis, reporter):
             # that should be fastest, and it provides scale for the progress reporting.
             sync_executor.submit(count_files, source_folder, reporter, self.policies_manager)
 
-        # Schedule each of the actions
-        bucket = None
+        # Bucket for scheduling actions.
+        # For bucket-to-bucket sync, the bucket for the API calls should be the destination.
+        action_bucket = None
         if dest_type == 'b2':
-            bucket = dest_folder.bucket
+            action_bucket = dest_folder.bucket
         elif source_type == 'b2':
-            bucket = source_folder.bucket
+            action_bucket = source_folder.bucket
 
+        # Schedule each of the actions.
         for action in self.make_folder_sync_actions(
             source_folder, dest_folder, now_millis, reporter, self.policies_manager
         ):
-            logging.debug('scheduling action %s on bucket %s', action, bucket)
-            sync_executor.submit(action.run, bucket, reporter, self.dry_run)
+            logging.debug('scheduling action %s on bucket %s', action, action_bucket)
+            sync_executor.submit(action.run, action_bucket, reporter, self.dry_run)
 
         # Wait for everything to finish
         sync_executor.shutdown()
diff --git a/test/unit/conftest.py b/test/unit/conftest.py
index b8022388c..a696956ed 100644
--- a/test/unit/conftest.py
+++ b/test/unit/conftest.py
@@ -9,13 +9,21 @@
 ######################################################################
 import os
 import sys
+from glob import glob
 from pathlib import Path
 
 import pytest
 
 pytest.register_assert_rewrite('test.unit')
 
-API_VERSIONS = ['v0', 'v1']
+
+def get_api_versions():
+    return [
+        str(Path(p).parent.name) for p in sorted(glob(str(Path(__file__).parent / 'v*/apiver/')))
+    ]
+
+
+API_VERSIONS = get_api_versions()
 
 
 @pytest.hookimpl

From 3d5a0b83a7d6abed6ec8b53fe4d030ba6cfe4093 Mon Sep 17 00:00:00 2001
From: Maciej Lech <maciej.lech@reef.pl>
Date: Sat, 10 Oct 2020 18:10:56 +0200
Subject: [PATCH 14/15] Revert "Fix usage of progress listener"

This reverts commit 2f0f77f9206cbbe598b941751835638a5300d560.
---
 b2sdk/download_dest.py                    |  3 +-
 b2sdk/sync/action.py                      | 39 +++++++--------
 b2sdk/transfer/emerge/executor.py         | 41 ++++++++--------
 b2sdk/transfer/outbound/copy_manager.py   | 50 +++++++++----------
 b2sdk/transfer/outbound/upload_manager.py | 56 +++++++++++-----------
 test/unit/v0/test_bucket.py               | 50 +++++++++----------
 test/unit/v0/test_download_dest.py        | 12 ++---
 test/unit/v1/test_bucket.py               | 58 ++++++++++-------------
 test/unit/v1/test_download_dest.py        | 12 ++---
 9 files changed, 155 insertions(+), 166 deletions(-)

diff --git a/b2sdk/download_dest.py b/b2sdk/download_dest.py
index 1bf6f0ef1..35d69ed91 100644
--- a/b2sdk/download_dest.py
+++ b/b2sdk/download_dest.py
@@ -219,4 +219,5 @@ def write_file_and_report_progress_context(
             if range_ is not None:
                 total_bytes = range_[1] - range_[0] + 1
             self.progress_listener.set_total_bytes(total_bytes)
-            yield WritingStreamWithProgress(file_, self.progress_listener)
+            with self.progress_listener:
+                yield WritingStreamWithProgress(file_, self.progress_listener)
diff --git a/b2sdk/sync/action.py b/b2sdk/sync/action.py
index 6caa8aae9..bacbe01f5 100644
--- a/b2sdk/sync/action.py
+++ b/b2sdk/sync/action.py
@@ -13,7 +13,6 @@
 import logging
 import os
 from ..download_dest import DownloadDestLocalFile
-from ..progress import DoNothingProgressListener
 from ..raw_api import SRC_LAST_MODIFIED_MILLIS
 from ..transfer.outbound.upload_source import UploadSourceLocalFile
 from .report import SyncFileReporter
@@ -123,15 +122,13 @@ def do_action(self, bucket, reporter):
         if reporter:
             progress_listener = SyncFileReporter(reporter)
         else:
-            progress_listener = DoNothingProgressListener()
-
-        with progress_listener:
-            bucket.upload(
-                UploadSourceLocalFile(self.local_full_path),
-                self.b2_file_name,
-                file_info={SRC_LAST_MODIFIED_MILLIS: str(self.mod_time_millis)},
-                progress_listener=progress_listener
-            )
+            progress_listener = None
+        bucket.upload(
+            UploadSourceLocalFile(self.local_full_path),
+            self.b2_file_name,
+            file_info={SRC_LAST_MODIFIED_MILLIS: str(self.mod_time_millis)},
+            progress_listener=progress_listener
+        )
 
     def do_report(self, bucket, reporter):
         """
@@ -248,13 +245,12 @@ def do_action(self, bucket, reporter):
         if reporter:
             progress_listener = SyncFileReporter(reporter)
         else:
-            progress_listener = DoNothingProgressListener()
+            progress_listener = None
 
         # Download the file to a .tmp file
         download_path = self.local_full_path + '.b2.sync.tmp'
         download_dest = DownloadDestLocalFile(download_path)
-        with progress_listener:
-            bucket.download_file_by_id(self.file_id, download_dest, progress_listener)
+        bucket.download_file_by_id(self.file_id, download_dest, progress_listener)
 
         # Move the file into place
         try:
@@ -328,15 +324,14 @@ def do_action(self, bucket, reporter):
         if reporter:
             progress_listener = SyncFileReporter(reporter)
         else:
-            progress_listener = DoNothingProgressListener()
-
-        with progress_listener:
-            bucket.copy(
-                self.file_id,
-                self.dest_b2_file_name,
-                length=self.size,
-                progress_listener=progress_listener
-            )
+            progress_listener = None
+
+        bucket.copy(
+            self.file_id,
+            self.dest_b2_file_name,
+            length=self.size,
+            progress_listener=progress_listener
+        )
 
     def do_report(self, bucket, reporter):
         """
diff --git a/b2sdk/transfer/emerge/executor.py b/b2sdk/transfer/emerge/executor.py
index b351706b5..600431069 100644
--- a/b2sdk/transfer/emerge/executor.py
+++ b/b2sdk/transfer/emerge/executor.py
@@ -147,26 +147,27 @@ def execute_plan(self, emerge_plan):
             )
         file_id = unfinished_file.file_id
 
-        large_file_upload_state = LargeFileUploadState(self.progress_listener)
-
-        part_futures = []
-        for part_number, emerge_part in emerge_plan.enumerate_emerge_parts():
-            execution_step_factory = LargeFileEmergeExecutionStepFactory(
-                self,
-                emerge_part,
-                part_number,
-                file_id,
-                large_file_upload_state,
-                finished_parts=finished_parts,
-            )
-            execution_step = execution_step_factory.get_execution_step()
-            future = self._execute_step(execution_step)
-            part_futures.append(future)
-
-        # Collect the sha1 checksums of the parts as the uploads finish.
-        # If any of them raised an exception, that same exception will
-        # be raised here by result()
-        part_sha1_array = [interruptible_get_result(f)['contentSha1'] for f in part_futures]
+        with self.progress_listener:
+            large_file_upload_state = LargeFileUploadState(self.progress_listener)
+
+            part_futures = []
+            for part_number, emerge_part in emerge_plan.enumerate_emerge_parts():
+                execution_step_factory = LargeFileEmergeExecutionStepFactory(
+                    self,
+                    emerge_part,
+                    part_number,
+                    file_id,
+                    large_file_upload_state,
+                    finished_parts=finished_parts,
+                )
+                execution_step = execution_step_factory.get_execution_step()
+                future = self._execute_step(execution_step)
+                part_futures.append(future)
+
+            # Collect the sha1 checksums of the parts as the uploads finish.
+            # If any of them raised an exception, that same exception will
+            # be raised here by result()
+            part_sha1_array = [interruptible_get_result(f)['contentSha1'] for f in part_futures]
 
         # Finish the large file
         response = self.services.session.finish_large_file(file_id, part_sha1_array)
diff --git a/b2sdk/transfer/outbound/copy_manager.py b/b2sdk/transfer/outbound/copy_manager.py
index c92162b64..fa39edf1c 100644
--- a/b2sdk/transfer/outbound/copy_manager.py
+++ b/b2sdk/transfer/outbound/copy_manager.py
@@ -151,29 +151,31 @@ def _copy_small_file(
         destination_bucket_id,
         progress_listener,
     ):
-        progress_listener.set_total_bytes(copy_source.get_content_length() or 0)
-
-        bytes_range = copy_source.get_bytes_range()
-
-        if content_type is None:
-            if file_info is not None:
-                raise ValueError('File info can be set only when content type is set')
-            metadata_directive = MetadataDirectiveMode.COPY
-        else:
-            if file_info is None:
-                raise ValueError('File info can be not set only when content type is not set')
-            metadata_directive = MetadataDirectiveMode.REPLACE
-
-        response = self.services.session.copy_file(
-            copy_source.file_id,
-            file_name,
-            bytes_range=bytes_range,
-            metadata_directive=metadata_directive,
-            content_type=content_type,
-            file_info=file_info,
-            destination_bucket_id=destination_bucket_id
-        )
-        file_info = FileVersionInfoFactory.from_api_response(response)
-        progress_listener.bytes_completed(file_info.size)
+        with progress_listener:
+            progress_listener.set_total_bytes(copy_source.get_content_length() or 0)
+
+            bytes_range = copy_source.get_bytes_range()
+
+            if content_type is None:
+                if file_info is not None:
+                    raise ValueError('File info can be set only when content type is set')
+                metadata_directive = MetadataDirectiveMode.COPY
+            else:
+                if file_info is None:
+                    raise ValueError('File info can be not set only when content type is not set')
+                metadata_directive = MetadataDirectiveMode.REPLACE
+
+            response = self.services.session.copy_file(
+                copy_source.file_id,
+                file_name,
+                bytes_range=bytes_range,
+                metadata_directive=metadata_directive,
+                content_type=content_type,
+                file_info=file_info,
+                destination_bucket_id=destination_bucket_id
+            )
+            file_info = FileVersionInfoFactory.from_api_response(response)
+            if progress_listener is not None:
+                progress_listener.bytes_completed(file_info.size)
 
         return file_info
diff --git a/b2sdk/transfer/outbound/upload_manager.py b/b2sdk/transfer/outbound/upload_manager.py
index 0c9017765..535c171d8 100644
--- a/b2sdk/transfer/outbound/upload_manager.py
+++ b/b2sdk/transfer/outbound/upload_manager.py
@@ -184,32 +184,34 @@ def _upload_small_file(
         content_length = upload_source.get_content_length()
         exception_info_list = []
         progress_listener.set_total_bytes(content_length)
-
-        for _ in range(self.MAX_UPLOAD_ATTEMPTS):
-            try:
-                with upload_source.open() as file:
-                    input_stream = ReadingStreamWithProgress(
-                        file, progress_listener, length=content_length
-                    )
-                    if upload_source.is_sha1_known():
-                        content_sha1 = upload_source.get_content_sha1()
-                    else:
-                        input_stream = StreamWithHash(input_stream, stream_length=content_length)
-                        content_sha1 = HEX_DIGITS_AT_END
-                    # it is important that `len()` works on `input_stream`
-                    response = self.services.session.upload_file(
-                        bucket_id, file_name, len(input_stream), content_type, content_sha1,
-                        file_info, input_stream
-                    )
-                    if content_sha1 == HEX_DIGITS_AT_END:
-                        content_sha1 = input_stream.hash
-                    assert content_sha1 == response['contentSha1']
-                    return FileVersionInfoFactory.from_api_response(response)
-
-            except B2Error as e:
-                if not e.should_retry_upload():
-                    raise
-                exception_info_list.append(e)
-                self.account_info.clear_bucket_upload_data(bucket_id)
+        with progress_listener:
+            for _ in range(self.MAX_UPLOAD_ATTEMPTS):
+                try:
+                    with upload_source.open() as file:
+                        input_stream = ReadingStreamWithProgress(
+                            file, progress_listener, length=content_length
+                        )
+                        if upload_source.is_sha1_known():
+                            content_sha1 = upload_source.get_content_sha1()
+                        else:
+                            input_stream = StreamWithHash(
+                                input_stream, stream_length=content_length
+                            )
+                            content_sha1 = HEX_DIGITS_AT_END
+                        # it is important that `len()` works on `input_stream`
+                        response = self.services.session.upload_file(
+                            bucket_id, file_name, len(input_stream), content_type, content_sha1,
+                            file_info, input_stream
+                        )
+                        if content_sha1 == HEX_DIGITS_AT_END:
+                            content_sha1 = input_stream.hash
+                        assert content_sha1 == response['contentSha1']
+                        return FileVersionInfoFactory.from_api_response(response)
+
+                except B2Error as e:
+                    if not e.should_retry_upload():
+                        raise
+                    exception_info_list.append(e)
+                    self.account_info.clear_bucket_upload_data(bucket_id)
 
         raise MaxRetriesExceeded(self.MAX_UPLOAD_ATTEMPTS, exception_info_list)
diff --git a/test/unit/v0/test_bucket.py b/test/unit/v0/test_bucket.py
index 07fcecb6b..eba4af50f 100644
--- a/test/unit/v0/test_bucket.py
+++ b/test/unit/v0/test_bucket.py
@@ -425,8 +425,8 @@ def test_upload_bytes(self):
 
     def test_upload_bytes_progress(self):
         data = b'hello world'
-        with StubProgressListener() as progress_listener:
-            self.bucket.upload_bytes(data, 'file1', progress_listener=progress_listener)
+        progress_listener = StubProgressListener()
+        self.bucket.upload_bytes(data, 'file1', progress_listener=progress_listener)
         self.assertTrue(progress_listener.is_valid())
 
     def test_upload_local_file(self):
@@ -472,8 +472,8 @@ def test_upload_file_too_many_retryable_errors(self):
 
     def test_upload_large(self):
         data = self._make_data(self.simulator.MIN_PART_SIZE * 3)
-        with StubProgressListener() as progress_listener:
-            self.bucket.upload_bytes(data, 'file1', progress_listener=progress_listener)
+        progress_listener = StubProgressListener()
+        self.bucket.upload_bytes(data, 'file1', progress_listener=progress_listener)
         self._check_file_contents('file1', data)
         self.assertTrue(progress_listener.is_valid())
 
@@ -482,8 +482,8 @@ def test_upload_large_resume(self):
         data = self._make_data(part_size * 3)
         large_file_id = self._start_large_file('file1')
         self._upload_part(large_file_id, 1, data[:part_size])
-        with StubProgressListener() as progress_listener:
-            file_info = self.bucket.upload_bytes(data, 'file1', progress_listener=progress_listener)
+        progress_listener = StubProgressListener()
+        file_info = self.bucket.upload_bytes(data, 'file1', progress_listener=progress_listener)
         self.assertEqual(large_file_id, file_info.id_)
         self._check_file_contents('file1', data)
         self.assertTrue(progress_listener.is_valid())
@@ -492,8 +492,8 @@ def test_upload_large_resume_no_parts(self):
         part_size = self.simulator.MIN_PART_SIZE
         data = self._make_data(part_size * 3)
         large_file_id = self._start_large_file('file1')
-        with StubProgressListener() as progress_listener:
-            file_info = self.bucket.upload_bytes(data, 'file1', progress_listener=progress_listener)
+        progress_listener = StubProgressListener()
+        file_info = self.bucket.upload_bytes(data, 'file1', progress_listener=progress_listener)
         self.assertNotEqual(large_file_id, file_info.id_)  # it's not a match if there are no parts
         self._check_file_contents('file1', data)
         self.assertTrue(progress_listener.is_valid())
@@ -505,8 +505,8 @@ def test_upload_large_resume_all_parts_there(self):
         self._upload_part(large_file_id, 1, data[:part_size])
         self._upload_part(large_file_id, 2, data[part_size:2 * part_size])
         self._upload_part(large_file_id, 3, data[2 * part_size:])
-        with StubProgressListener() as progress_listener:
-            file_info = self.bucket.upload_bytes(data, 'file1', progress_listener=progress_listener)
+        progress_listener = StubProgressListener()
+        file_info = self.bucket.upload_bytes(data, 'file1', progress_listener=progress_listener)
         self.assertEqual(large_file_id, file_info.id_)
         self._check_file_contents('file1', data)
         self.assertTrue(progress_listener.is_valid())
@@ -516,8 +516,8 @@ def test_upload_large_resume_part_does_not_match(self):
         data = self._make_data(part_size * 3)
         large_file_id = self._start_large_file('file1')
         self._upload_part(large_file_id, 3, data[:part_size])  # wrong part number for this data
-        with StubProgressListener() as progress_listener:
-            file_info = self.bucket.upload_bytes(data, 'file1', progress_listener=progress_listener)
+        progress_listener = StubProgressListener()
+        file_info = self.bucket.upload_bytes(data, 'file1', progress_listener=progress_listener)
         self.assertNotEqual(large_file_id, file_info.id_)
         self._check_file_contents('file1', data)
         self.assertTrue(progress_listener.is_valid())
@@ -527,8 +527,8 @@ def test_upload_large_resume_wrong_part_size(self):
         data = self._make_data(part_size * 3)
         large_file_id = self._start_large_file('file1')
         self._upload_part(large_file_id, 1, data[:part_size + 1])  # one byte to much
-        with StubProgressListener() as progress_listener:
-            file_info = self.bucket.upload_bytes(data, 'file1', progress_listener=progress_listener)
+        progress_listener = StubProgressListener()
+        file_info = self.bucket.upload_bytes(data, 'file1', progress_listener=progress_listener)
         self.assertNotEqual(large_file_id, file_info.id_)
         self._check_file_contents('file1', data)
         self.assertTrue(progress_listener.is_valid())
@@ -538,13 +538,10 @@ def test_upload_large_resume_file_info(self):
         data = self._make_data(part_size * 3)
         large_file_id = self._start_large_file('file1', {'property': 'value1'})
         self._upload_part(large_file_id, 1, data[:part_size])
-        with StubProgressListener() as progress_listener:
-            file_info = self.bucket.upload_bytes(
-                data,
-                'file1',
-                progress_listener=progress_listener,
-                file_infos={'property': 'value1'}
-            )
+        progress_listener = StubProgressListener()
+        file_info = self.bucket.upload_bytes(
+            data, 'file1', progress_listener=progress_listener, file_infos={'property': 'value1'}
+        )
         self.assertEqual(large_file_id, file_info.id_)
         self._check_file_contents('file1', data)
         self.assertTrue(progress_listener.is_valid())
@@ -554,13 +551,10 @@ def test_upload_large_resume_file_info_does_not_match(self):
         data = self._make_data(part_size * 3)
         large_file_id = self._start_large_file('file1', {'property': 'value1'})
         self._upload_part(large_file_id, 1, data[:part_size])
-        with StubProgressListener() as progress_listener:
-            file_info = self.bucket.upload_bytes(
-                data,
-                'file1',
-                progress_listener=progress_listener,
-                file_infos={'property': 'value2'}
-            )
+        progress_listener = StubProgressListener()
+        file_info = self.bucket.upload_bytes(
+            data, 'file1', progress_listener=progress_listener, file_infos={'property': 'value2'}
+        )
         self.assertNotEqual(large_file_id, file_info.id_)
         self._check_file_contents('file1', data)
         self.assertTrue(progress_listener.is_valid())
diff --git a/test/unit/v0/test_download_dest.py b/test/unit/v0/test_download_dest.py
index 9268b3831..21726237a 100644
--- a/test/unit/v0/test_download_dest.py
+++ b/test/unit/v0/test_download_dest.py
@@ -75,12 +75,12 @@ def test_write_and_set_mod_time_and_progress(self):
         with TempDir() as temp_dir:
             file_path = os.path.join(temp_dir, "test.txt")
             download_local_file = DownloadDestLocalFile(file_path)
-            with ProgressListenerForTest() as progress_listener:
-                download_dest = DownloadDestProgressWrapper(download_local_file, progress_listener)
-                with download_dest.make_file_context(
-                    "file_id", "file_name", 100, "content_type", "sha1", {}, mod_time
-                ) as f:
-                    f.write(b'hello world\n')
+            progress_listener = ProgressListenerForTest()
+            download_dest = DownloadDestProgressWrapper(download_local_file, progress_listener)
+            with download_dest.make_file_context(
+                "file_id", "file_name", 100, "content_type", "sha1", {}, mod_time
+            ) as f:
+                f.write(b'hello world\n')
             with open(file_path, 'rb') as f:
                 self.assertEqual(b'hello world\n', f.read())
             self.assertEqual(mod_time, int(os.path.getmtime(file_path) * 1000))
diff --git a/test/unit/v1/test_bucket.py b/test/unit/v1/test_bucket.py
index a0134f016..71c7c6e32 100644
--- a/test/unit/v1/test_bucket.py
+++ b/test/unit/v1/test_bucket.py
@@ -511,8 +511,8 @@ def test_upload_bytes(self):
 
     def test_upload_bytes_progress(self):
         data = b'hello world'
-        with StubProgressListener() as progress_listener:
-            self.bucket.upload_bytes(data, 'file1', progress_listener=progress_listener)
+        progress_listener = StubProgressListener()
+        self.bucket.upload_bytes(data, 'file1', progress_listener=progress_listener)
         self.assertTrue(progress_listener.is_valid())
 
     def test_upload_local_file(self):
@@ -575,8 +575,8 @@ def test_upload_file_too_many_retryable_errors(self):
 
     def test_upload_large(self):
         data = self._make_data(self.simulator.MIN_PART_SIZE * 3)
-        with StubProgressListener() as progress_listener:
-            self.bucket.upload_bytes(data, 'file1', progress_listener=progress_listener)
+        progress_listener = StubProgressListener()
+        self.bucket.upload_bytes(data, 'file1', progress_listener=progress_listener)
         self._check_file_contents('file1', data)
         self.assertTrue(progress_listener.is_valid())
 
@@ -593,8 +593,8 @@ def test_upload_large_resume(self):
         data = self._make_data(part_size * 3)
         large_file_id = self._start_large_file('file1')
         self._upload_part(large_file_id, 1, data[:part_size])
-        with StubProgressListener() as progress_listener:
-            file_info = self.bucket.upload_bytes(data, 'file1', progress_listener=progress_listener)
+        progress_listener = StubProgressListener()
+        file_info = self.bucket.upload_bytes(data, 'file1', progress_listener=progress_listener)
         self.assertEqual(large_file_id, file_info.id_)
         self._check_file_contents('file1', data)
         self.assertTrue(progress_listener.is_valid())
@@ -603,8 +603,8 @@ def test_upload_large_resume_no_parts(self):
         part_size = self.simulator.MIN_PART_SIZE
         data = self._make_data(part_size * 3)
         large_file_id = self._start_large_file('file1')
-        with StubProgressListener() as progress_listener:
-            file_info = self.bucket.upload_bytes(data, 'file1', progress_listener=progress_listener)
+        progress_listener = StubProgressListener()
+        file_info = self.bucket.upload_bytes(data, 'file1', progress_listener=progress_listener)
         self.assertNotEqual(large_file_id, file_info.id_)  # it's not a match if there are no parts
         self._check_file_contents('file1', data)
         self.assertTrue(progress_listener.is_valid())
@@ -616,8 +616,8 @@ def test_upload_large_resume_all_parts_there(self):
         self._upload_part(large_file_id, 1, data[:part_size])
         self._upload_part(large_file_id, 2, data[part_size:2 * part_size])
         self._upload_part(large_file_id, 3, data[2 * part_size:])
-        with StubProgressListener() as progress_listener:
-            file_info = self.bucket.upload_bytes(data, 'file1', progress_listener=progress_listener)
+        progress_listener = StubProgressListener()
+        file_info = self.bucket.upload_bytes(data, 'file1', progress_listener=progress_listener)
         self.assertEqual(large_file_id, file_info.id_)
         self._check_file_contents('file1', data)
         self.assertTrue(progress_listener.is_valid())
@@ -627,8 +627,8 @@ def test_upload_large_resume_part_does_not_match(self):
         data = self._make_data(part_size * 3)
         large_file_id = self._start_large_file('file1')
         self._upload_part(large_file_id, 3, data[:part_size])  # wrong part number for this data
-        with StubProgressListener() as progress_listener:
-            file_info = self.bucket.upload_bytes(data, 'file1', progress_listener=progress_listener)
+        progress_listener = StubProgressListener()
+        file_info = self.bucket.upload_bytes(data, 'file1', progress_listener=progress_listener)
         self.assertNotEqual(large_file_id, file_info.id_)
         self._check_file_contents('file1', data)
         self.assertTrue(progress_listener.is_valid())
@@ -638,8 +638,8 @@ def test_upload_large_resume_wrong_part_size(self):
         data = self._make_data(part_size * 3)
         large_file_id = self._start_large_file('file1')
         self._upload_part(large_file_id, 1, data[:part_size + 1])  # one byte to much
-        with StubProgressListener() as progress_listener:
-            file_info = self.bucket.upload_bytes(data, 'file1', progress_listener=progress_listener)
+        progress_listener = StubProgressListener()
+        file_info = self.bucket.upload_bytes(data, 'file1', progress_listener=progress_listener)
         self.assertNotEqual(large_file_id, file_info.id_)
         self._check_file_contents('file1', data)
         self.assertTrue(progress_listener.is_valid())
@@ -649,13 +649,10 @@ def test_upload_large_resume_file_info(self):
         data = self._make_data(part_size * 3)
         large_file_id = self._start_large_file('file1', {'property': 'value1'})
         self._upload_part(large_file_id, 1, data[:part_size])
-        with StubProgressListener() as progress_listener:
-            file_info = self.bucket.upload_bytes(
-                data,
-                'file1',
-                progress_listener=progress_listener,
-                file_infos={'property': 'value1'}
-            )
+        progress_listener = StubProgressListener()
+        file_info = self.bucket.upload_bytes(
+            data, 'file1', progress_listener=progress_listener, file_infos={'property': 'value1'}
+        )
         self.assertEqual(large_file_id, file_info.id_)
         self._check_file_contents('file1', data)
         self.assertTrue(progress_listener.is_valid())
@@ -665,13 +662,10 @@ def test_upload_large_resume_file_info_does_not_match(self):
         data = self._make_data(part_size * 3)
         large_file_id = self._start_large_file('file1', {'property': 'value1'})
         self._upload_part(large_file_id, 1, data[:part_size])
-        with StubProgressListener() as progress_listener:
-            file_info = self.bucket.upload_bytes(
-                data,
-                'file1',
-                progress_listener=progress_listener,
-                file_infos={'property': 'value2'}
-            )
+        progress_listener = StubProgressListener()
+        file_info = self.bucket.upload_bytes(
+            data, 'file1', progress_listener=progress_listener, file_infos={'property': 'value2'}
+        )
         self.assertNotEqual(large_file_id, file_info.id_)
         self._check_file_contents('file1', data)
         self.assertTrue(progress_listener.is_valid())
@@ -680,10 +674,10 @@ def test_upload_large_file_with_restricted_api_key(self):
         self.simulator.key_id_to_key[self.account_id].name_prefix_or_none = 'path/to'
         part_size = self.simulator.MIN_PART_SIZE
         data = self._make_data(part_size * 3)
-        with StubProgressListener() as progress_listener:
-            file_info = self.bucket.upload_bytes(
-                data, 'path/to/file1', progress_listener=progress_listener
-            )
+        progress_listener = StubProgressListener()
+        file_info = self.bucket.upload_bytes(
+            data, 'path/to/file1', progress_listener=progress_listener
+        )
         self.assertEqual(len(data), file_info.size)
         self._check_file_contents('path/to/file1', data)
         self.assertTrue(progress_listener.is_valid())
diff --git a/test/unit/v1/test_download_dest.py b/test/unit/v1/test_download_dest.py
index 976d525fa..80cf07f79 100644
--- a/test/unit/v1/test_download_dest.py
+++ b/test/unit/v1/test_download_dest.py
@@ -75,12 +75,12 @@ def test_write_and_set_mod_time_and_progress(self):
         with TempDir() as temp_dir:
             file_path = os.path.join(temp_dir, "test.txt")
             download_local_file = DownloadDestLocalFile(file_path)
-            with ProgressListenerForTest() as progress_listener:
-                download_dest = DownloadDestProgressWrapper(download_local_file, progress_listener)
-                with download_dest.make_file_context(
-                    "file_id", "file_name", 100, "content_type", "sha1", {}, mod_time
-                ) as f:
-                    f.write(b'hello world\n')
+            progress_listener = ProgressListenerForTest()
+            download_dest = DownloadDestProgressWrapper(download_local_file, progress_listener)
+            with download_dest.make_file_context(
+                "file_id", "file_name", 100, "content_type", "sha1", {}, mod_time
+            ) as f:
+                f.write(b'hello world\n')
             with open(file_path, 'rb') as f:
                 self.assertEqual(b'hello world\n', f.read())
             self.assertEqual(mod_time, int(os.path.getmtime(file_path) * 1000))

From 93013c184bbe3835ea45688accfdb541b2746a52 Mon Sep 17 00:00:00 2001
From: Maciej Lech <63924630+mlech-reef@users.noreply.github.com>
Date: Wed, 14 Oct 2020 19:30:38 +0200
Subject: [PATCH 15/15] Update b2sdk/sync/action.py
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

Co-authored-by: Paweł Polewicz <p.polewicz@gmail.com>
---
 b2sdk/sync/action.py | 18 ++++++------------
 1 file changed, 6 insertions(+), 12 deletions(-)

diff --git a/b2sdk/sync/action.py b/b2sdk/sync/action.py
index bacbe01f5..13124e9cd 100644
--- a/b2sdk/sync/action.py
+++ b/b2sdk/sync/action.py
@@ -285,18 +285,12 @@ def __init__(
         self, relative_name, b2_file_name, file_id, dest_b2_file_name, mod_time_millis, size
     ):
         """
-        :param relative_name: a relative file name
-        :type relative_name: str
-        :param b2_file_name: a name of a remote file
-        :type b2_file_name: str
-        :param file_id: a file ID
-        :type file_id: str
-        :param dest_b2_file_name: a name of a destination remote file
-        :type dest_b2_file_name: str
-        :param mod_time_millis: file modification time in milliseconds
-        :type mod_time_millis: int
-        :param size: a file size
-        :type size: int
+        :param str relative_name: a relative file name
+        :param str b2_file_name: a name of a remote file
+        :param str file_id: a file ID
+        :param str dest_b2_file_name: a name of a destination remote file
+        :param int mod_time_millis: file modification time in milliseconds
+        :param int size: a file size
         """
         self.relative_name = relative_name
         self.b2_file_name = b2_file_name