From 313fc3d9fe4c3615d69be8aaff744be646b05b42 Mon Sep 17 00:00:00 2001 From: Olzhas Arystanov Date: Fri, 20 Dec 2024 21:15:22 +0500 Subject: [PATCH] Format with ruff --- b2/_internal/_cli/arg_parser_types.py | 8 +- b2/_internal/_cli/argcompleters.py | 15 +- b2/_internal/_cli/autocomplete_cache.py | 11 +- b2/_internal/_cli/autocomplete_install.py | 86 +- b2/_internal/_cli/b2api.py | 5 +- b2/_internal/_cli/b2args.py | 68 +- b2/_internal/_cli/obj_dumps.py | 20 +- b2/_internal/_cli/obj_loads.py | 4 +- b2/_internal/_utils/python_compat.py | 3 +- b2/_internal/_utils/uri.py | 42 +- b2/_internal/arg_parser.py | 22 +- b2/_internal/b2v3/registry.py | 1 + b2/_internal/b2v3/rm.py | 4 +- b2/_internal/console_tool.py | 481 +++-- doc/source/conf.py | 64 +- noxfile.py | 125 +- pyproject.toml | 1 + test/conftest.py | 6 +- test/integration/conftest.py | 58 +- test/integration/helpers.py | 82 +- test/integration/persistent_bucket.py | 21 +- test/integration/test_autocomplete.py | 19 +- test/integration/test_b2_command_line.py | 1496 ++++++++------ test/integration/test_help.py | 6 +- test/integration/test_tqdm_closer.py | 12 +- test/static/test_licenses.py | 12 +- test/unit/_cli/fixtures/dummy_command.py | 8 +- test/unit/_cli/test_autocomplete_cache.py | 12 +- test/unit/_cli/test_autocomplete_install.py | 43 +- test/unit/_cli/test_obj_dumps.py | 54 +- test/unit/_cli/test_obj_loads.py | 25 +- test/unit/_cli/test_pickle.py | 1 + test/unit/_cli/test_shell.py | 2 +- test/unit/_cli/unpickle.py | 2 +- test/unit/_utils/test_uri.py | 83 +- test/unit/conftest.py | 32 +- .../console_tool/test_authorize_account.py | 76 +- test/unit/console_tool/test_download_file.py | 100 +- test/unit/console_tool/test_file_hide.py | 10 +- test/unit/console_tool/test_file_info.py | 34 +- .../test_file_server_side_copy.py | 169 +- test/unit/console_tool/test_get_url.py | 29 +- test/unit/console_tool/test_help.py | 16 +- .../console_tool/test_install_autocomplete.py | 19 +- test/unit/console_tool/test_ls.py | 60 +- .../console_tool/test_notification_rules.py | 219 +- test/unit/console_tool/test_rm.py | 6 +- test/unit/console_tool/test_upload_file.py | 122 +- .../test_upload_unbound_stream.py | 53 +- test/unit/test_console_tool.py | 1754 +++++++++-------- test/unit/test_copy.py | 15 +- test/unit/test_represent_file_metadata.py | 19 +- 52 files changed, 3119 insertions(+), 2516 deletions(-) diff --git a/b2/_internal/_cli/arg_parser_types.py b/b2/_internal/_cli/arg_parser_types.py index 43b2de85a..e2e88775f 100644 --- a/b2/_internal/_cli/arg_parser_types.py +++ b/b2/_internal/_cli/arg_parser_types.py @@ -15,14 +15,14 @@ import arrow from b2sdk.v2 import RetentionPeriod -_arrow_version = tuple(int(p) for p in arrow.__version__.split(".")) +_arrow_version = tuple(int(p) for p in arrow.__version__.split('.')) def parse_comma_separated_list(s): """ Parse comma-separated list. 
""" - return [word.strip() for word in s.split(",")] + return [word.strip() for word in s.split(',')] def parse_millis_from_float_timestamp(s): @@ -31,9 +31,9 @@ def parse_millis_from_float_timestamp(s): """ parsed = arrow.get(float(s)) if _arrow_version < (1, 0, 0): - return int(parsed.format("XSSS")) + return int(parsed.format('XSSS')) else: - return int(parsed.format("x")[:13]) + return int(parsed.format('x')[:13]) def parse_range(s): diff --git a/b2/_internal/_cli/argcompleters.py b/b2/_internal/_cli/argcompleters.py index d59d75478..8620da791 100644 --- a/b2/_internal/_cli/argcompleters.py +++ b/b2/_internal/_cli/argcompleters.py @@ -19,11 +19,12 @@ def bucket_name_completer(prefix, parsed_args, **kwargs): from b2sdk.v2 import unprintable_to_hex from b2._internal._cli.b2api import _get_b2api_for_profile + api = _get_b2api_for_profile(getattr(parsed_args, 'profile', None)) res = [ unprintable_to_hex(bucket_name_alias) for bucket_name_alias in itertools.chain.from_iterable( - (bucket.name, f"b2://{bucket.name}") for bucket in api.list_buckets(use_cache=True) + (bucket.name, f'b2://{bucket.name}') for bucket in api.list_buckets(use_cache=True) ) ] return res @@ -69,29 +70,29 @@ def b2uri_file_completer(prefix: str, parsed_args, **kwargs): prefix_without_scheme = removeprefix(prefix, 'b2://') if '/' not in prefix_without_scheme: return [ - f"b2://{unprintable_to_hex(bucket.name)}/" + f'b2://{unprintable_to_hex(bucket.name)}/' for bucket in api.list_buckets(use_cache=True) ] b2_uri = parse_b2_uri(prefix) bucket = api.get_bucket_by_name(b2_uri.bucket_name) file_versions = bucket.ls( - f"{b2_uri.path}*", + f'{b2_uri.path}*', latest_only=True, recursive=True, fetch_count=LIST_FILE_NAMES_MAX_LIMIT, with_wildcard=True, ) return [ - unprintable_to_hex(f"b2://{bucket.name}/{file_version.file_name}") + unprintable_to_hex(f'b2://{bucket.name}/{file_version.file_name}') for file_version, folder_name in islice(file_versions, LIST_FILE_NAMES_MAX_LIMIT) if file_version ] elif prefix.startswith('b2id://'): # listing all files from all buckets is unreasonably expensive - return ["b2id://"] + return ['b2id://'] else: return [ - "b2://", - "b2id://", + 'b2://', + 'b2id://', ] diff --git a/b2/_internal/_cli/autocomplete_cache.py b/b2/_internal/_cli/autocomplete_cache.py index 0e63bcee7..bf18c475f 100644 --- a/b2/_internal/_cli/autocomplete_cache.py +++ b/b2/_internal/_cli/autocomplete_cache.py @@ -57,13 +57,14 @@ def __init__(self, dir_path: pathlib.Path | None = None) -> None: def _cache_dir(self) -> pathlib.Path: if not self._dir: - self._dir = pathlib.Path( - platformdirs.user_cache_dir(appname='b2', appauthor='backblaze') - ) / 'autocomplete' + self._dir = ( + pathlib.Path(platformdirs.user_cache_dir(appname='b2', appauthor='backblaze')) + / 'autocomplete' + ) return self._dir def _fname(self, identifier: str) -> str: - return f"b2-autocomplete-cache-{identifier}.pickle" + return f'b2-autocomplete-cache-{identifier}.pickle' def get_pickle(self, identifier: str) -> bytes | None: path = self._cache_dir() / self._fname(identifier) @@ -93,7 +94,7 @@ def __init__( self, tracker: StateTracker, store: PickleStore, - unpickle: Callable[[bytes], argparse.ArgumentParser] | None = None + unpickle: Callable[[bytes], argparse.ArgumentParser] | None = None, ): self._tracker = tracker self._store = store diff --git a/b2/_internal/_cli/autocomplete_install.py b/b2/_internal/_cli/autocomplete_install.py index 8a906b16e..f3aba56c1 100644 --- a/b2/_internal/_cli/autocomplete_install.py +++ 
b/b2/_internal/_cli/autocomplete_install.py @@ -37,9 +37,9 @@ def autocomplete_install(prog: str, shell: str = 'bash') -> None: try: autocomplete_installer = SHELL_REGISTRY.get(shell, prog=prog) except RegistryKeyError: - raise AutocompleteInstallError(f"Unsupported shell: {shell}") + raise AutocompleteInstallError(f'Unsupported shell: {shell}') autocomplete_installer.install() - logger.info("Autocomplete for %s has been enabled.", prog) + logger.info('Autocomplete for %s has been enabled.', prog) class ShellAutocompleteInstaller(abc.ABC): @@ -53,8 +53,9 @@ def install(self) -> None: script_path = self.create_script() if not self.is_enabled(): logger.info( - "%s completion doesn't seem to be autoloaded from %s.", self.shell_exec, - script_path.parent + "%s completion doesn't seem to be autoloaded from %s.", + self.shell_exec, + script_path.parent, ) try: self.force_enable(script_path) @@ -64,15 +65,15 @@ def install(self) -> None: ) if not self.is_enabled(): - logger.error("Autocomplete is still not enabled.") - raise AutocompleteInstallError(f"Autocomplete for {self.prog} install failed.") + logger.error('Autocomplete is still not enabled.') + raise AutocompleteInstallError(f'Autocomplete for {self.prog} install failed.') def create_script(self) -> Path: """Create autocomplete for the given program.""" shellcode = self.get_shellcode() script_path = self.get_script_path() - logger.info("Creating autocompletion script under %s", script_path) + logger.info('Creating autocompletion script under %s', script_path) script_path.parent.mkdir(exist_ok=True, parents=True, mode=0o755) script_path.write_text(shellcode) return script_path @@ -116,25 +117,25 @@ def force_enable(self, completion_script: Path) -> None: """Enable autocomplete for the given program, common logic.""" rc_path = self.get_rc_path() if rc_path.exists() and rc_path.read_text().strip(): - bck_path = rc_path.with_suffix(f".{datetime.now():%Y-%m-%dT%H-%M-%S}.bak") - logger.warning("Backing up %s to %s", rc_path, bck_path) + bck_path = rc_path.with_suffix(f'.{datetime.now():%Y-%m-%dT%H-%M-%S}.bak') + logger.warning('Backing up %s to %s', rc_path, bck_path) try: shutil.copyfile(rc_path, bck_path) except OSError as e: raise AutocompleteInstallError( - f"Failed to backup {rc_path} under {bck_path}" + f'Failed to backup {rc_path} under {bck_path}' ) from e - logger.warning("Explicitly adding %s to %s", completion_script, rc_path) + logger.warning('Explicitly adding %s to %s', completion_script, rc_path) add_or_update_shell_section( - rc_path, f"{self.prog} autocomplete", self.prog, self.get_rc_section(completion_script) + rc_path, f'{self.prog} autocomplete', self.prog, self.get_rc_section(completion_script) ) def get_rc_section(self, completion_script: Path) -> str: - return f"source {quote(str(completion_script))}" + return f'source {quote(str(completion_script))}' def get_script_path(self) -> Path: """Get autocomplete script path for the given program, common logic.""" - script_dir = Path(f"~/.{self.shell_exec}_completion.d/").expanduser() + script_dir = Path(f'~/.{self.shell_exec}_completion.d/').expanduser() return script_dir / self.prog def is_enabled(self) -> bool: @@ -145,13 +146,13 @@ def is_enabled(self) -> bool: @SHELL_REGISTRY.register('bash') class BashAutocompleteInstaller(BashLikeAutocompleteInstaller): shell_exec = 'bash' - rc_file_path = "~/.bashrc" + rc_file_path = '~/.bashrc' @SHELL_REGISTRY.register('zsh') class ZshAutocompleteInstaller(BashLikeAutocompleteInstaller): shell_exec = 'zsh' - rc_file_path = "~/.zshrc" + 
rc_file_path = '~/.zshrc' def get_rc_section(self, completion_script: Path) -> str: return textwrap.dedent( @@ -163,7 +164,7 @@ def get_rc_section(self, completion_script: Path) -> str: def get_script_path(self) -> Path: """Custom get_script_path for Zsh, if the structure differs from the base implementation.""" - return Path("~/.zsh/completion/").expanduser() / f"_{self.prog}" + return Path('~/.zsh/completion/').expanduser() / f'_{self.prog}' def is_enabled(self) -> bool: rc_path = self.get_rc_path() @@ -181,7 +182,7 @@ def is_enabled(self) -> bool: @SHELL_REGISTRY.register('fish') class FishAutocompleteInstaller(ShellAutocompleteInstaller): shell_exec = 'fish' - rc_file_path = "~/.config/fish/config.fish" + rc_file_path = '~/.config/fish/config.fish' def force_enable(self, completion_script: Path) -> None: raise NotImplementedError("Fish shell doesn't support manual completion enabling.") @@ -189,23 +190,24 @@ def force_enable(self, completion_script: Path) -> None: def get_script_path(self) -> Path: """Get autocomplete script path for the given program, common logic.""" complete_paths = [ - Path(p) for p in shlex.split( + Path(p) + for p in shlex.split( subprocess.run( [self.shell_exec, '-c', 'echo $fish_complete_path'], timeout=30, text=True, check=True, - capture_output=True + capture_output=True, ).stdout ) ] - user_path = Path("~/.config/fish/completions").expanduser() + user_path = Path('~/.config/fish/completions').expanduser() if complete_paths: target_path = user_path if user_path in complete_paths else complete_paths[0] else: - logger.warning("$fish_complete_path is empty, falling back to %r", user_path) + logger.warning('$fish_complete_path is empty, falling back to %r', user_path) target_path = user_path - return target_path / f"{self.prog}.fish" + return target_path / f'{self.prog}.fish' def is_enabled(self) -> bool: """ @@ -216,11 +218,13 @@ def is_enabled(self) -> bool: named filenames). """ environ = os.environ.copy() - environ.setdefault("TERM", "xterm") # TERM has to be set for fish to load completions + environ.setdefault('TERM', 'xterm') # TERM has to be set for fish to load completions return _silent_success_run_with_tty( [ - self.shell_exec, '-i', '-c', - f'string length -q -- (complete -C{quote(f"{self.prog} ")} >/dev/null && complete -c {quote(self.prog)})' + self.shell_exec, + '-i', + '-c', + f'string length -q -- (complete -C{quote(f"{self.prog} ")} >/dev/null && complete -c {quote(self.prog)})', ], env=environ, ) @@ -233,8 +237,8 @@ def _silent_success_run_with_tty( if emulate_tty and not find_spec('pexpect'): emulate_tty = False logger.warning( - "pexpect is needed to check autocomplete installation correctness without tty. " - "You can install it via `pip install pexpect`." + 'pexpect is needed to check autocomplete installation correctness without tty. ' + 'You can install it via `pip install pexpect`.' 
) run_func = _silent_success_run_with_pty if emulate_tty else _silent_success_run return run_func(cmd, timeout=timeout, env=env) @@ -255,12 +259,15 @@ def _silent_success_run(cmd: list[str], timeout: int = 30, env: dict | None = No except subprocess.TimeoutExpired: p.kill() stdout, stderr = p.communicate(timeout=1) - logger.warning("Command %r timed out, stdout: %r, stderr: %r", cmd, stdout, stderr) + logger.warning('Command %r timed out, stdout: %r, stderr: %r', cmd, stdout, stderr) else: logger.log( logging.DEBUG if p.returncode == 0 else logging.WARNING, - "Command %r exited with code %r, stdout: %r, stderr: %r", cmd, p.returncode, stdout, - stderr + 'Command %r exited with code %r, stdout: %r, stderr: %r', + cmd, + p.returncode, + stdout, + stderr, ) return p.returncode == 0 @@ -281,7 +288,7 @@ def _silent_success_run_with_pty( child.logfile_read = output child.expect(pexpect.EOF) except pexpect.TIMEOUT: - logger.warning("Command %r timed out, output: %r", cmd, output.getvalue()) + logger.warning('Command %r timed out, output: %r', cmd, output.getvalue()) child.kill(signal.SIGKILL) return False finally: @@ -289,22 +296,25 @@ def _silent_success_run_with_pty( logger.log( logging.DEBUG if child.exitstatus == 0 else logging.WARNING, - "Command %r exited with code %r, output: %r", cmd, child.exitstatus, output.getvalue() + 'Command %r exited with code %r, output: %r', + cmd, + child.exitstatus, + output.getvalue(), ) return child.exitstatus == 0 def add_or_update_shell_section( - path: Path, section: str, managed_by: str, content: str, comment_sign="#" + path: Path, section: str, managed_by: str, content: str, comment_sign='#' ) -> None: """Add or update a section in a file.""" - section_start = f"{comment_sign} >>> {section} >>>" - section_end = f"{comment_sign} <<< {section} <<<" + section_start = f'{comment_sign} >>> {section} >>>' + section_end = f'{comment_sign} <<< {section} <<<' assert section_end not in content try: file_content = path.read_text() except FileNotFoundError: - file_content = "" + file_content = '' full_content = f""" {section_start} @@ -319,7 +329,7 @@ def add_or_update_shell_section( if pattern.search(file_content): file_content = pattern.sub(full_content, file_content) else: - file_content += f"\n{full_content}\n" + file_content += f'\n{full_content}\n' path.write_text(file_content) diff --git a/b2/_internal/_cli/b2api.py b/b2/_internal/_cli/b2api.py index fc35d458a..9fe0c8efc 100644 --- a/b2/_internal/_cli/b2api.py +++ b/b2/_internal/_cli/b2api.py @@ -29,7 +29,6 @@ def _get_b2api_for_profile( raise_if_does_not_exist: bool = False, **kwargs, ) -> B2Api: - if raise_if_does_not_exist: account_info_file = SqliteAccountInfo.get_user_account_info_path(profile=profile) if not os.path.exists(account_info_file): @@ -64,4 +63,6 @@ def _get_inmemory_b2api(**kwargs) -> B2Api: def _get_b2httpapiconfig(): - return B2HttpApiConfig(user_agent_append=os.environ.get(B2_USER_AGENT_APPEND_ENV_VAR),) + return B2HttpApiConfig( + user_agent_append=os.environ.get(B2_USER_AGENT_APPEND_ENV_VAR), + ) diff --git a/b2/_internal/_cli/b2args.py b/b2/_internal/_cli/b2args.py index fb026724f..f06e824a8 100644 --- a/b2/_internal/_cli/b2args.py +++ b/b2/_internal/_cli/b2args.py @@ -10,6 +10,7 @@ """ Utility functions for adding b2-specific arguments to an argparse parser. 
""" + import argparse import functools from os import environ @@ -27,7 +28,7 @@ def b2id_uri(value: str) -> B2FileIdURI: b2_uri = parse_b2_uri(value) if not isinstance(b2_uri, B2FileIdURI): - raise ValueError(f"B2 URI pointing to a file id is required, but {value} was provided") + raise ValueError(f'B2 URI pointing to a file id is required, but {value} was provided') return b2_uri @@ -35,11 +36,11 @@ def b2_bucket_uri(value: str) -> B2URI: b2_uri = parse_b2_uri(value) if not isinstance(b2_uri, B2URI): raise ValueError( - f"B2 URI pointing to a bucket object is required, but {value} was provided" + f'B2 URI pointing to a bucket object is required, but {value} was provided' ) if b2_uri.path: raise ValueError( - f"B2 URI pointing to a bucket object is required, but {value!r} was provided which contains path part: {b2_uri.path!r}" + f'B2 URI pointing to a bucket object is required, but {value!r} was provided which contains path part: {b2_uri.path!r}' ) return b2_uri @@ -49,7 +50,7 @@ def b2id_or_b2_bucket_uri(value: str) -> Union[B2URI, B2FileIdURI]: if isinstance(b2_uri, B2URI): if b2_uri.path: raise ValueError( - f"B2 URI pointing to a bucket object is required, but {value!r} was provided which contains path part: {b2_uri.path!r}" + f'B2 URI pointing to a bucket object is required, but {value!r} was provided which contains path part: {b2_uri.path!r}' ) return b2_uri return b2_uri @@ -60,14 +61,14 @@ def b2id_or_file_like_b2_uri(value: str, *, by_id: Optional[bool] = None) -> B2U if isinstance(b2_uri, B2URI): if b2_uri.is_dir(): raise ValueError( - f"B2 URI pointing to a file-like object is required, but {value} was provided" + f'B2 URI pointing to a file-like object is required, but {value} was provided' ) return b2_uri elif isinstance(b2_uri, B2FileIdURI): if by_id is False: raise ValueError( - f"B2 URI pointing to file-like object by name is required (e.g. b2://bucketName/fileName)," - f" but {value} was provided" + f'B2 URI pointing to file-like object by name is required (e.g. b2://bucketName/fileName),' + f' but {value} was provided' ) return b2_uri @@ -78,15 +79,16 @@ def parse_bucket_name(value: str, allow_all_buckets: bool = False) -> str: if isinstance(uri, B2URI): if uri.path: raise ValueError( - f"Expected a bucket name, but {value!r} was provided which contains path part: {uri.path!r}" + f'Expected a bucket name, but {value!r} was provided which contains path part: {uri.path!r}' ) return uri.bucket_name return str(value) -def b2id_or_file_like_b2_uri_or_bucket_name(value: str, *, - by_id: Optional[bool] = None) -> Union[B2URIBase, str]: - if "://" not in value: +def b2id_or_file_like_b2_uri_or_bucket_name( + value: str, *, by_id: Optional[bool] = None +) -> Union[B2URIBase, str]: + if '://' not in value: return value else: b2_uri = b2id_or_file_like_b2_uri(value, by_id=by_id) @@ -107,22 +109,22 @@ def b2id_or_file_like_b2_uri_or_bucket_name(value: str, *, def add_bucket_name_argument( - parser: argparse.ArgumentParser, name="bucketName", help="Target bucket name", nargs=None + parser: argparse.ArgumentParser, name='bucketName', help='Target bucket name', nargs=None ): parser.add_argument( name, type=wrap_with_argument_type_error( - functools.partial(parse_bucket_name, allow_all_buckets=nargs == "?") + functools.partial(parse_bucket_name, allow_all_buckets=nargs == '?') ), help=help, - nargs=nargs + nargs=nargs, ).completer = bucket_name_completer def add_b2_uri_argument( parser: argparse.ArgumentParser, - name="B2_URI", - help="B2 URI pointing to a bucket with optional path, e.g. 
b2://yourBucket, b2://yourBucket/file.txt, b2://yourBucket/folderName/", + name='B2_URI', + help='B2 URI pointing to a bucket with optional path, e.g. b2://yourBucket, b2://yourBucket/file.txt, b2://yourBucket/folderName/', ): """ Add B2 URI as an argument to the parser. @@ -138,7 +140,7 @@ def add_b2_uri_argument( def add_b2_bucket_uri_argument( parser: argparse.ArgumentParser, - name="B2_URI", + name='B2_URI', ): """ Add B2 URI as an argument to the parser. @@ -148,23 +150,23 @@ def add_b2_bucket_uri_argument( parser.add_argument( name, type=B2_BUCKET_URI_ARG_TYPE, - help="B2 URI pointing to a bucket, e.g. b2://yourBucket", + help='B2 URI pointing to a bucket, e.g. b2://yourBucket', ).completer = b2uri_file_completer -def add_b2id_uri_argument(parser: argparse.ArgumentParser, name="B2_URI"): +def add_b2id_uri_argument(parser: argparse.ArgumentParser, name='B2_URI'): """ Add B2 URI (b2id://) as an argument to the parser. """ parser.add_argument( name, type=B2ID_URI_ARG_TYPE, - help="B2 URI pointing to a file id. e.g. b2id://fileId", + help='B2 URI pointing to a file id. e.g. b2id://fileId', ).completer = b2uri_file_completer def add_b2id_or_b2_uri_argument( - parser: argparse.ArgumentParser, name="B2_URI", *, allow_all_buckets: bool = False + parser: argparse.ArgumentParser, name='B2_URI', *, allow_all_buckets: bool = False ): """ Add B2 URI (b2:// or b2id://) as an argument to the parser. @@ -178,33 +180,33 @@ def add_b2id_or_b2_uri_argument( name, type=B2ID_OR_B2_URI_OR_ALL_BUCKETS_ARG_TYPE, default=None, - nargs="?", - help="B2 URI pointing to a bucket, directory, file or all buckets. " - "e.g. b2://yourBucket, b2://yourBucket/file.txt, b2://yourBucket/folderName/, b2id://fileId, or b2://", + nargs='?', + help='B2 URI pointing to a bucket, directory, file or all buckets. ' + 'e.g. b2://yourBucket, b2://yourBucket/file.txt, b2://yourBucket/folderName/, b2id://fileId, or b2://', ) else: argument_spec = parser.add_argument( name, type=B2ID_OR_B2_URI_ARG_TYPE, - help="B2 URI pointing to a bucket, directory or a file. " - "e.g. b2://yourBucket, b2://yourBucket/file.txt, b2://yourBucket/folderName/, or b2id://fileId", + help='B2 URI pointing to a bucket, directory or a file. ' + 'e.g. b2://yourBucket, b2://yourBucket/file.txt, b2://yourBucket/folderName/, or b2id://fileId', ) argument_spec.completer = b2uri_file_completer -def add_b2id_or_b2_bucket_uri_argument(parser: argparse.ArgumentParser, name="B2_URI"): +def add_b2id_or_b2_bucket_uri_argument(parser: argparse.ArgumentParser, name='B2_URI'): arg = parser.add_argument( name, type=B2ID_OR_B2_BUCKET_URI_ARG_TYPE, - help="B2 URI pointing to a bucket, or a file id. e.g. b2://yourBucket, or b2id://fileId", + help='B2 URI pointing to a bucket, or a file id. e.g. b2://yourBucket, or b2id://fileId', ) arg.completer = b2uri_file_completer return arg def add_b2id_or_file_like_b2_uri_argument( - parser: argparse.ArgumentParser, name="B2_URI", *, by_id: Optional[bool] = None + parser: argparse.ArgumentParser, name='B2_URI', *, by_id: Optional[bool] = None ): """ Add a B2 URI pointing to a file as an argument to the parser. @@ -214,21 +216,21 @@ def add_b2id_or_file_like_b2_uri_argument( type=wrap_with_argument_type_error( functools.partial(b2id_or_file_like_b2_uri, by_id=by_id) ), - help="B2 URI pointing to a file, e.g. b2://yourBucket/file.txt or b2id://fileId", + help='B2 URI pointing to a file, e.g. 
b2://yourBucket/file.txt or b2id://fileId', ) arg.completer = b2uri_file_completer return arg def add_b2id_or_file_like_b2_uri_or_bucket_name_argument( - parser: argparse.ArgumentParser, name="B2_URI", by_id: Optional[bool] = None + parser: argparse.ArgumentParser, name='B2_URI', by_id: Optional[bool] = None ): """ Add a B2 URI pointing to a file as an argument to the parser. """ - help_ = "B2 URI pointing to a file, e.g. b2://yourBucket/file.txt" + help_ = 'B2 URI pointing to a file, e.g. b2://yourBucket/file.txt' if by_id is not False: - help_ += " or b2id://fileId" + help_ += ' or b2id://fileId' arg = parser.add_argument( name, type=wrap_with_argument_type_error( diff --git a/b2/_internal/_cli/obj_dumps.py b/b2/_internal/_cli/obj_dumps.py index 3a80de8f6..ae43238ac 100644 --- a/b2/_internal/_cli/obj_dumps.py +++ b/b2/_internal/_cli/obj_dumps.py @@ -14,11 +14,11 @@ ) _simple_repr_map = { - False: "false", - None: "null", - True: "true", + False: 'false', + None: 'null', + True: 'true', } -_simple_repr_map_values = set(_simple_repr_map.values()) | {"yes", "no"} +_simple_repr_map_values = set(_simple_repr_map.values()) | {'yes', 'no'} def _yaml_simple_repr(obj): @@ -31,27 +31,27 @@ def _yaml_simple_repr(obj): if simple_repr: return simple_repr obj_repr = unprintable_to_hex(str(obj)) - if isinstance( - obj, str - ) and (obj == "" or obj_repr.lower() in _simple_repr_map_values or obj_repr.isdigit()): + if isinstance(obj, str) and ( + obj == '' or obj_repr.lower() in _simple_repr_map_values or obj_repr.isdigit() + ): obj_repr = repr(obj) # add quotes to distinguish from numbers and booleans return obj_repr def _id_name_first_key(item): try: - return ("id", "name").index(str(item[0]).lower()), item[0], item[1] + return ('id', 'name').index(str(item[0]).lower()), item[0], item[1] except ValueError: return 2, item[0], item[1] def _dump(data, indent=0, skip=False, *, output): - prefix = " " * indent + prefix = ' ' * indent if isinstance(data, dict): for idx, (key, value) in enumerate(sorted(data.items(), key=_id_name_first_key)): output.write(f"{'' if skip and idx == 0 else prefix}{_yaml_simple_repr(key)}: ") if isinstance(value, (dict, list)): - output.write("\n") + output.write('\n') _dump(value, indent + 2, output=output) else: _dump(value, 0, True, output=output) diff --git a/b2/_internal/_cli/obj_loads.py b/b2/_internal/_cli/obj_loads.py index d7dd3b67c..a512ca53a 100644 --- a/b2/_internal/_cli/obj_loads.py +++ b/b2/_internal/_cli/obj_loads.py @@ -67,7 +67,7 @@ def type_with_config(type_: type[T], config: pydantic.ConfigDict) -> type[T]: def validated_loads(data: str, expected_type: type[T] | None = None) -> T: val = _UNDEF if expected_type is not None and pydantic is not None: - expected_type = type_with_config(expected_type, pydantic.ConfigDict(extra="allow")) + expected_type = type_with_config(expected_type, pydantic.ConfigDict(extra='allow')) try: ta = TypeAdapter(expected_type) except TypeError: @@ -75,7 +75,7 @@ def validated_loads(data: str, expected_type: type[T] | None = None) -> T: # This is thrown on python<3.10 even with eval_type_backport logger.debug( f'Failed to create TypeAdapter for {expected_type!r} using pydantic, falling back to json.loads', - exc_info=True + exc_info=True, ) val = _UNDEF else: diff --git a/b2/_internal/_utils/python_compat.py b/b2/_internal/_utils/python_compat.py index 411586553..a561edd14 100644 --- a/b2/_internal/_utils/python_compat.py +++ b/b2/_internal/_utils/python_compat.py @@ -10,12 +10,13 @@ """ Utilities for compatibility with older Python 
versions. """ + import sys if sys.version_info < (3, 9): def removeprefix(s: str, prefix: str) -> str: - return s[len(prefix):] if s.startswith(prefix) else s + return s[len(prefix) :] if s.startswith(prefix) else s else: removeprefix = str.removeprefix diff --git a/b2/_internal/_utils/uri.py b/b2/_internal/_utils/uri.py index 71965d794..be6788b0e 100644 --- a/b2/_internal/_utils/uri.py +++ b/b2/_internal/_utils/uri.py @@ -45,10 +45,10 @@ class B2URI(B2URIBase): """ bucket_name: str - path: str = "" + path: str = '' def __str__(self) -> str: - return f"b2://{self.bucket_name}/{self.path}" + return f'b2://{self.bucket_name}/{self.path}' def is_dir(self) -> bool | None: """ @@ -66,7 +66,7 @@ def is_dir(self) -> bool | None: :return: True if the path is a directory, None if it is unknown """ - return not self.path or self.path.endswith("/") or None + return not self.path or self.path.endswith('/') or None @dataclasses.dataclass(frozen=True) @@ -78,7 +78,7 @@ class B2FileIdURI(B2URIBase): file_id: str def __str__(self) -> str: - return f"b2id://{self.file_id}" + return f'b2id://{self.file_id}' def parse_uri(uri: str, *, allow_all_buckets: bool = False) -> Path | B2URI | B2FileIdURI: @@ -91,9 +91,9 @@ def parse_uri(uri: str, *, allow_all_buckets: bool = False) -> Path | B2URI | B2 :raises ValueError: if the URI is invalid """ if not uri: - raise ValueError("URI cannot be empty") + raise ValueError('URI cannot be empty') parsed = urllib.parse.urlsplit(uri) - if parsed.scheme == "": + if parsed.scheme == '': return pathlib.Path(uri) return _parse_b2_uri(uri, parsed, allow_all_buckets=allow_all_buckets) @@ -119,29 +119,29 @@ def _parse_b2_uri( parsed: urllib.parse.SplitResult, *, allow_all_buckets: bool = False, - allow_b2id: bool = True + allow_b2id: bool = True, ) -> B2URI | B2FileIdURI: - if parsed.scheme in ("b2", "b2id"): - path = urllib.parse.urlunsplit(parsed._replace(scheme="", netloc="")) + if parsed.scheme in ('b2', 'b2id'): + path = urllib.parse.urlunsplit(parsed._replace(scheme='', netloc='')) if not parsed.netloc: if allow_all_buckets: if path: raise ValueError( f"Invalid B2 URI: all buckets URI doesn't allow non-empty path, but {path!r} was provided" ) - return B2URI(bucket_name="") - raise ValueError(f"Invalid B2 URI: {uri!r}") + return B2URI(bucket_name='') + raise ValueError(f'Invalid B2 URI: {uri!r}') elif parsed.password or parsed.username: raise ValueError( - "Invalid B2 URI: credentials passed using `user@password:` syntax is not supported in URI" + 'Invalid B2 URI: credentials passed using `user@password:` syntax is not supported in URI' ) - if parsed.scheme == "b2": - return B2URI(bucket_name=parsed.netloc, path=removeprefix(path, "/")) - elif parsed.scheme == "b2id" and allow_b2id: + if parsed.scheme == 'b2': + return B2URI(bucket_name=parsed.netloc, path=removeprefix(path, '/')) + elif parsed.scheme == 'b2id' and allow_b2id: return B2FileIdURI(file_id=parsed.netloc) else: - raise ValueError(f"Unsupported URI scheme: {parsed.scheme!r}") + raise ValueError(f'Unsupported URI scheme: {parsed.scheme!r}') class B2URIAdapter: @@ -159,7 +159,7 @@ def __getattr__(self, name): @singledispatchmethod def download_file_by_uri(self, uri, *args, **kwargs): - raise NotImplementedError(f"Unsupported URI type: {type(uri)}") + raise NotImplementedError(f'Unsupported URI type: {type(uri)}') @download_file_by_uri.register def _(self, uri: B2URI, *args, **kwargs): @@ -172,7 +172,7 @@ def _(self, uri: B2FileIdURI, *args, **kwargs): @singledispatchmethod def get_file_info_by_uri(self, uri, *args, 
**kwargs): - raise NotImplementedError(f"Unsupported URI type: {type(uri)}") + raise NotImplementedError(f'Unsupported URI type: {type(uri)}') @get_file_info_by_uri.register def _(self, uri: B2URI, *args, **kwargs) -> DownloadVersion: @@ -184,7 +184,7 @@ def _(self, uri: B2FileIdURI, *args, **kwargs) -> FileVersion: @singledispatchmethod def get_download_url_by_uri(self, uri, *args, **kwargs): - raise NotImplementedError(f"Unsupported URI type: {type(uri)}") + raise NotImplementedError(f'Unsupported URI type: {type(uri)}') @get_download_url_by_uri.register def _(self, uri: B2URI, *args, **kwargs) -> str: @@ -196,7 +196,7 @@ def _(self, uri: B2FileIdURI, *args, **kwargs) -> str: @singledispatchmethod def ls(self, uri, *args, **kwargs): - raise NotImplementedError(f"Unsupported URI type: {type(uri)}") + raise NotImplementedError(f'Unsupported URI type: {type(uri)}') @ls.register def _(self, uri: B2URI, *args, filters: Sequence[Filter] = (), **kwargs): @@ -214,7 +214,7 @@ def _(self, uri: B2FileIdURI, *args, **kwargs): @singledispatchmethod def copy_by_uri(self, uri, *args, **kwargs): - raise NotImplementedError(f"Unsupported URI type: {type(uri)}") + raise NotImplementedError(f'Unsupported URI type: {type(uri)}') @copy_by_uri.register def _(self, source: B2FileIdURI, destination: B2URI, *args, **kwargs): diff --git a/b2/_internal/arg_parser.py b/b2/_internal/arg_parser.py index 42f4fe00b..d59f03366 100644 --- a/b2/_internal/arg_parser.py +++ b/b2/_internal/arg_parser.py @@ -87,7 +87,7 @@ def __init__( add_help_all: bool = True, for_docs: bool = False, custom_deprecated: bool = False, - **kwargs + **kwargs, ): """ @@ -137,13 +137,13 @@ def _make_short_description(self, usage: str, raw_description: str) -> str: return usage if not raw_description: - return "" + return '' for line in raw_description.splitlines(): if line.strip(): return self._encode_description(line.strip()) - return "" + return '' def error(self, message): self.print_help() @@ -178,8 +178,9 @@ def print_help(self, *args, show_all: bool = False, **kwargs): """ patches = [ unittest.mock.patch.object( - self, 'formatter_class', - functools.partial(B2RawTextHelpFormatter, show_all=show_all) + self, + 'formatter_class', + functools.partial(B2RawTextHelpFormatter, show_all=show_all), ) ] if self._subparsers is not None and not show_all: @@ -213,7 +214,7 @@ def format_usage(self, use_short_description: bool = False, col_length: int = 16 return super().format_usage() formatter = self._get_formatter() - formatter.add_text(f"{self.prog:{col_length + 2}} {self._short_description}") + formatter.add_text(f'{self.prog:{col_length + 2}} {self._short_description}') return formatter.format_help() @@ -232,7 +233,7 @@ def deprecated_action_call(self, parser, namespace, values, option_string=None, kebab_option_string = _camel_to_kebab(option_string) print( f"The '{option_string}' argument is deprecated. 
Use '{kebab_option_string}' instead.", - file=sys.stderr + file=sys.stderr, ) return deprecated_action_call @@ -248,7 +249,7 @@ def _camel_to_kebab(s: str): def _kebab_to_camel(s: str): - return "--" + _kebab_to_camel_pattern.sub(lambda m: m.group(1).upper(), s[2:]) + return '--' + _kebab_to_camel_pattern.sub(lambda m: m.group(1).upper(), s[2:]) def _kebab_to_snake(s: str): @@ -280,8 +281,9 @@ def add_normalized_argument(parser, param_name, *args, **kwargs): action = argparse._StoreAction kwargs_camel['action'] = type( - 'DeprecatedAction', (action, DeprecatedActionMarker), - {'__call__': make_deprecated_action_call(action)} + 'DeprecatedAction', + (action, DeprecatedActionMarker), + {'__call__': make_deprecated_action_call(action)}, ) parser.add_argument(f'{param_name_kebab}', *args, **kwargs_kebab) diff --git a/b2/_internal/b2v3/registry.py b/b2/_internal/b2v3/registry.py index 400b7d33c..f2cde18ec 100644 --- a/b2/_internal/b2v3/registry.py +++ b/b2/_internal/b2v3/registry.py @@ -89,6 +89,7 @@ class Ls(B2URIMustPointToFolderMixin, B2URIBucketNFolderNameArgMixin, BaseLs): - **listFiles** - **listBuckets** (if bucket name is not provided) """ + ALLOW_ALL_BUCKETS = True diff --git a/b2/_internal/b2v3/rm.py b/b2/_internal/b2v3/rm.py index 39fe8f430..2a431f262 100644 --- a/b2/_internal/b2v3/rm.py +++ b/b2/_internal/b2v3/rm.py @@ -32,8 +32,8 @@ class B2URIMustPointToFolderMixin: def get_b2_uri_from_arg(self, args: argparse.Namespace) -> B2URI: b2_uri = super().get_b2_uri_from_arg(args) - if b2_uri.path and not args.with_wildcard and not b2_uri.path.endswith("/"): - b2_uri = dataclasses.replace(b2_uri, path=b2_uri.path + "/") + if b2_uri.path and not args.with_wildcard and not b2_uri.path.endswith('/'): + b2_uri = dataclasses.replace(b2_uri, path=b2_uri.path + '/') return b2_uri diff --git a/b2/_internal/console_tool.py b/b2/_internal/console_tool.py index ddeec663a..26471b8fb 100644 --- a/b2/_internal/console_tool.py +++ b/b2/_internal/console_tool.py @@ -227,12 +227,12 @@ def resolve_b2_bin_call_name(argv: list[str] | None = None) -> str: return call_name -FILE_RETENTION_COMPATIBILITY_WARNING = """ +FILE_RETENTION_COMPATIBILITY_WARNING = f""" .. warning:: - Setting file retention mode to '{}' is irreversible - such files can only be ever deleted after their retention + Setting file retention mode to '{RetentionMode.COMPLIANCE.value}' is irreversible - such files can only be ever deleted after their retention period passes, regardless of keys (master or not) used. This is especially dangerous when setting bucket default retention, as it may lead to high storage costs. -""".format(RetentionMode.COMPLIANCE.value) +""" # Strings available to use when formatting doc strings. 
DOC_STRING_DATA = dict( @@ -391,7 +391,7 @@ def _setup_parser(cls, parser): parser, '--default-server-side-encryption-algorithm', default='AES256', - choices=('AES256',) + choices=('AES256',), ) super()._setup_parser(parser) # noqa @@ -430,13 +430,13 @@ def _setup_parser(cls, parser): parser, '--destination-server-side-encryption', default=None, - choices=('SSE-B2', 'SSE-C') + choices=('SSE-B2', 'SSE-C'), ) add_normalized_argument( parser, '--destination-server-side-encryption-algorithm', default='AES256', - choices=('AES256',) + choices=('AES256',), ) super()._setup_parser(parser) # noqa @@ -453,8 +453,8 @@ def _get_destination_sse_setting(self, args): encryption_key_b64 = os.environ.get(B2_DESTINATION_SSE_C_KEY_B64_ENV_VAR) if not encryption_key_b64: raise ValueError( - 'Using SSE-C requires providing an encryption key via %s env var' % - B2_DESTINATION_SSE_C_KEY_B64_ENV_VAR + 'Using SSE-C requires providing an encryption key via %s env var' + % B2_DESTINATION_SSE_C_KEY_B64_ENV_VAR ) key_id = os.environ.get(B2_DESTINATION_SSE_C_KEY_ID_ENV_VAR) if key_id is None: @@ -484,7 +484,7 @@ def _setup_parser(cls, parser): parser, '--file-retention-mode', default=None, - choices=(RetentionMode.COMPLIANCE.value, RetentionMode.GOVERNANCE.value) + choices=(RetentionMode.COMPLIANCE.value, RetentionMode.GOVERNANCE.value), ) add_normalized_argument( @@ -492,7 +492,7 @@ def _setup_parser(cls, parser): '--retain-until', type=parse_millis_from_float_timestamp, default=None, - metavar='TIMESTAMP' + metavar='TIMESTAMP', ) super()._setup_parser(parser) # noqa @@ -516,37 +516,33 @@ def _setup_parser(cls, parser: argparse.ArgumentParser) -> None: add_normalized_argument( parser, '--cache-control', - help= - "optional Cache-Control header, value based on RFC 2616 section 14.9, example: 'public, max-age=86400')" + help="optional Cache-Control header, value based on RFC 2616 section 14.9, example: 'public, max-age=86400')", ) add_normalized_argument( parser, '--content-disposition', - help= - "optional Content-Disposition header, value based on RFC 2616 section 19.5.1, example: 'attachment; filename=\"fname.ext\"'" + help='optional Content-Disposition header, value based on RFC 2616 section 19.5.1, example: \'attachment; filename="fname.ext"\'', ) add_normalized_argument( parser, '--content-encoding', - help= - "optional Content-Encoding header, value based on RFC 2616 section 14.11, example: 'gzip'" + help="optional Content-Encoding header, value based on RFC 2616 section 14.11, example: 'gzip'", ) add_normalized_argument( parser, '--content-language', - help= - "optional Content-Language header, value based on RFC 2616 section 14.12, example: 'mi, en'" + help="optional Content-Language header, value based on RFC 2616 section 14.12, example: 'mi, en'", ) add_normalized_argument( parser, '--expires', - help= - "optional Expires header, value based on RFC 2616 section 14.21, example: 'Thu, 01 Dec 2050 16:00:00 GMT'" + help="optional Expires header, value based on RFC 2616 section 14.21, example: 'Thu, 01 Dec 2050 16:00:00 GMT'", ) super()._setup_parser(parser) - def _file_info_with_header_args(self, args, - file_info: dict[str, str] | None) -> dict[str, str] | None: + def _file_info_with_header_args( + self, args, file_info: dict[str, str] | None + ) -> dict[str, str] | None: """Construct an updated file_info dictionary. Print a warning if any of file_info items will be overwritten by explicit header arguments. 
""" @@ -569,8 +565,8 @@ def _file_info_with_header_args(self, args, if overwritten: self._print_stderr( - 'The following file info items will be overwritten by explicit arguments:\n ' + - '\n '.join(f'{key} = {add_file_info[key]}' for key in overwritten) + 'The following file info items will be overwritten by explicit arguments:\n ' + + '\n '.join(f'{key} = {add_file_info[key]}' for key in overwritten) ) if add_file_info: @@ -615,7 +611,7 @@ def _setup_parser(cls, parser): parser, '--source-server-side-encryption-algorithm', default='AES256', - choices=('AES256',) + choices=('AES256',), ) super()._setup_parser(parser) # noqa @@ -632,8 +628,8 @@ def _get_source_sse_setting(cls, args): encryption_key_b64 = os.environ.get(B2_SOURCE_SSE_C_KEY_B64_ENV_VAR) if not encryption_key_b64: raise ValueError( - 'Using SSE-C requires providing an encryption key via %s env var' % - B2_SOURCE_SSE_C_KEY_B64_ENV_VAR + 'Using SSE-C requires providing an encryption key via %s env var' + % B2_SOURCE_SSE_C_KEY_B64_ENV_VAR ) key = EncryptionKey( secret=base64.b64decode(encryption_key_b64), key_id=UNKNOWN_KEY_ID @@ -729,14 +725,14 @@ def get_b2_uri_from_arg(self, args: argparse.Namespace) -> B2URIBase: if args.fileName: raise argparse.ArgumentError( self._b2_uri_arg, - "Both B2 URI and file name were provided, but only one is expected" + 'Both B2 URI and file name were provided, but only one is expected', ) return args.B2_URI if not args.fileName: raise argparse.ArgumentError( self._b2_uri_arg, - f"Incorrect B2 URI was provided, expected `b2://bucketName/fileName`, but got {args.B2_URI!r}" + f'Incorrect B2 URI was provided, expected `b2://bucketName/fileName`, but got {args.B2_URI!r}', ) self._print_stderr( 'WARNING: "bucketName fileName" arguments syntax is deprecated, use "b2://bucketName/fileName" instead' @@ -785,7 +781,7 @@ def _setup_parser(cls, parser): super()._setup_parser(parser) def get_b2_uri_from_arg(self, args: argparse.Namespace) -> B2URI: - return B2URI(removeprefix(args.bucketName or '', "b2://"), args.folderName or '') + return B2URI(removeprefix(args.bucketName or '', 'b2://'), args.folderName or '') class B2IDOrB2URIMixin: @@ -858,7 +854,7 @@ class ProgressMixin(Described): @classmethod def _setup_parser(cls, parser): add_normalized_argument( - parser, '--no-progress', action='store_true', help="progress will not be reported" + parser, '--no-progress', action='store_true', help='progress will not be reported' ) super()._setup_parser(parser) # noqa @@ -882,14 +878,13 @@ def _setup_parser(cls, parser): default=None, type=functools.partial(validated_loads, expected_type=LifecycleRule), dest='lifecycle_rules', - help="Lifecycle rule in JSON format. Can be supplied multiple times.", + help='Lifecycle rule in JSON format. 
Can be supplied multiple times.', ) add_normalized_argument( lifecycle_group, '--lifecycle-rules', type=functools.partial(validated_loads, expected_type=List[LifecycleRule]), - help= - "(deprecated; use --lifecycle-rule instead) List of lifecycle rules in JSON format.", + help='(deprecated; use --lifecycle-rule instead) List of lifecycle rules in JSON format.', ) super()._setup_parser(parser) # noqa @@ -928,10 +923,11 @@ def __enter__(self): return self def __exit__(self, exc_type, exc_val, exc_tb): - if sys.platform != "darwin" or os.environ.get('B2_TEST_DISABLE_TQDM_CLOSER'): + if sys.platform != 'darwin' or os.environ.get('B2_TEST_DISABLE_TQDM_CLOSER'): return try: from multiprocessing.synchronize import SemLock + tqdm_lock = self.progress_listener.tqdm.get_lock() if tqdm_lock.mp_lock._semlock.name is not None: SemLock._cleanup(tqdm_lock.mp_lock._semlock.name) @@ -1056,7 +1052,7 @@ def create_parser( '--no-escape-control-characters', dest='escape_control_characters', action='store_false', - help=argparse.SUPPRESS + help=argparse.SUPPRESS, ) common_parser.set_defaults(escape_control_characters=None) @@ -1074,7 +1070,7 @@ def create_parser( subparsers=subparsers, parents=parents, for_docs=for_docs, - b2_binary_name=b2_binary_name + b2_binary_name=b2_binary_name, ) return parser @@ -1086,8 +1082,7 @@ def run(self, args): return self._run(args) @abstractmethod - def _run(self, args) -> int: - ... + def _run(self, args) -> int: ... @classmethod def _setup_parser(cls, parser): @@ -1106,7 +1101,7 @@ def _parse_file_infos(cls, args_info): def _print_json(self, data) -> None: return self._print( json.dumps(data, indent=4, sort_keys=True, ensure_ascii=True, cls=B2CliJsonEncoder), - enforce_output=True + enforce_output=True, ) def _print_human_readable_structure(self, data) -> None: @@ -1122,7 +1117,7 @@ def _print( ) -> None: return self._print_standard_descriptor( self.stdout, - "stdout", + 'stdout', *args, enforce_output=enforce_output, end=end, @@ -1130,7 +1125,7 @@ def _print( def _print_stderr(self, *args, end: str | None = None) -> None: return self._print_standard_descriptor( - self.stderr, "stderr", *args, enforce_output=True, end=end + self.stderr, 'stderr', *args, enforce_output=True, end=end ) def _print_standard_descriptor( @@ -1157,7 +1152,7 @@ def _print_standard_descriptor( descriptor_name, *args, end=end, - sanitize=self.escape_control_characters + sanitize=self.escape_control_characters, ) @classmethod @@ -1168,7 +1163,7 @@ def _print_helper( descriptor_name: str, *args, sanitize: bool = True, - end: str | None = None + end: str | None = None, ): if sanitize: args = tuple(unprintable_to_hex(arg) or '' for arg in args) @@ -1176,15 +1171,12 @@ def _print_helper( descriptor.write(' '.join(args)) except UnicodeEncodeError: sys.stderr.write( - "\nWARNING: Unable to print unicode. Encoding for {} is: '{}'\n".format( - descriptor_name, - descriptor_encoding, - ) + f"\nWARNING: Unable to print unicode. 
Encoding for {descriptor_name} is: '{descriptor_encoding}'\n" ) args = [arg.encode('ascii', 'backslashreplace').decode() for arg in args] - sys.stderr.write("Trying to print: %s\n" % args) + sys.stderr.write('Trying to print: %s\n' % args) descriptor.write(' '.join(args)) - descriptor.write("\n" if end is None else end) + descriptor.write('\n' if end is None else end) def __str__(self): return f'{self.__class__.__module__}.{self.__class__.__name__}' @@ -1340,15 +1332,13 @@ def _run(self, args): realm = self._get_user_requested_realm(args) if args.applicationKeyId is None: - args.applicationKeyId = ( - os.environ.get(B2_APPLICATION_KEY_ID_ENV_VAR) or - input('Backblaze application key ID: ') + args.applicationKeyId = os.environ.get(B2_APPLICATION_KEY_ID_ENV_VAR) or input( + 'Backblaze application key ID: ' ) if args.applicationKey is None: - args.applicationKey = ( - os.environ.get(B2_APPLICATION_KEY_ENV_VAR) or - getpass.getpass('Backblaze application key: ') + args.applicationKey = os.environ.get(B2_APPLICATION_KEY_ENV_VAR) or getpass.getpass( + 'Backblaze application key: ' ) status = self.authorize(args.applicationKeyId, args.applicationKey, realm) @@ -1369,7 +1359,7 @@ def authorize(self, application_key_id, application_key, realm: str | None): verbose_realm = bool(realm) realm = realm or 'production' url = REALM_URLS.get(realm, realm) - logger.info(f"Using {url}") + logger.info(f'Using {url}') if verbose_realm: self._print_stderr(f'Using {url}') try: @@ -1388,8 +1378,9 @@ def authorize(self, application_key_id, application_key, realm: str | None): if allowed['bucketId'] is not None and allowed['bucketName'] is None: logger.error('ConsoleTool has bucket-restricted key and the bucket does not exist') self._print_stderr( - "ERROR: application key is restricted to bucket id '{}', which no longer exists" - .format(allowed['bucketId']) + "ERROR: application key is restricted to bucket id '{}', which no longer exists".format( + allowed['bucketId'] + ) ) self.api.account_info.clear() return 1 @@ -1467,8 +1458,12 @@ def _run(self, args): class FileServerSideCopyBase( - HeaderFlagsMixin, DestinationSseMixin, SourceSseMixin, FileRetentionSettingMixin, - LegalHoldMixin, Command + HeaderFlagsMixin, + DestinationSseMixin, + SourceSseMixin, + FileRetentionSettingMixin, + LegalHoldMixin, + Command, ): """ Copy a file version to the given bucket (server-side, **not** via download+upload). @@ -1599,7 +1594,9 @@ def _determine_source_metadata( destination_encryption ): # no sse-c, no problem return None, None - if target_file_info is not None or target_content_type is not None: # metadataDirective=REPLACE, no problem + if ( + target_file_info is not None or target_content_type is not None + ): # metadataDirective=REPLACE, no problem return None, None if not fetch_if_necessary: raise ValueError( @@ -1651,15 +1648,13 @@ def _setup_parser(cls, parser): parser, '--cors-rules', type=validated_loads, - help= - "If given, the bucket will have a 'custom' CORS configuration. Accepts a JSON string." + help="If given, the bucket will have a 'custom' CORS configuration. Accepts a JSON string.", ) add_normalized_argument( parser, '--file-lock-enabled', action='store_true', - help= - "If given, the bucket will have the file lock mechanism enabled. This parameter cannot be changed after bucket creation." + help='If given, the bucket will have the file lock mechanism enabled. 
This parameter cannot be changed after bucket creation.', ) parser.add_argument('--replication', type=validated_loads) add_bucket_name_argument(parser) @@ -1745,7 +1740,7 @@ def _run(self, args): key_name=args.keyName, valid_duration_seconds=args.duration, bucket_id=bucket_id_or_none, - name_prefix=args.name_prefix + name_prefix=args.name_prefix, ) self._print(f'{application_key.id_} {application_key.application_key}') @@ -1829,16 +1824,16 @@ class DownloadCommand( WriteBufferSizeMixin, SkipHashVerificationMixin, Command, - metaclass=ABCMeta + metaclass=ABCMeta, ): - """ helper methods for returning results from download commands """ + """helper methods for returning results from download commands""" def _print_download_info( self, downloaded_file: DownloadedFile, output_filepath: pathlib.Path ) -> None: download_version = downloaded_file.download_version - output_filepath_string = 'stdout' if output_filepath == STDOUT_FILEPATH else str( - output_filepath.resolve() + output_filepath_string = ( + 'stdout' if output_filepath == STDOUT_FILEPATH else str(output_filepath.resolve()) ) self._print_file_attribute('File name', download_version.file_name) self._print_file_attribute('File id', download_version.id_) @@ -1898,7 +1893,7 @@ def _represent_retention(cls, retention: FileRetentionSetting): retention.mode.value, datetime.datetime.fromtimestamp( retention.retain_until / 1000, datetime.timezone.utc - ) + ), ) raise ValueError(f'Unsupported retention mode: {retention.mode}') @@ -2094,7 +2089,7 @@ def _run(self, args): result = b.as_dict() # `files` is a generator. We don't want to collect all of the values from the # generator, as there many be billions of files in a large bucket. - files = b.ls("", latest_only=False, recursive=True) + files = b.ls('', latest_only=False, recursive=True) # `files` yields tuples of (file_version, folder_name). We don't care about # `folder_name`, so just access the first slot of the tuple directly in the # reducer. 
We can't ask a generator for its size, as the elements are yielded @@ -2530,7 +2525,7 @@ def _run(self, args): if args.long and args.json: raise CommandError('Cannot use --long and --json options together') - if not b2_uri or b2_uri == B2URI(""): + if not b2_uri or b2_uri == B2URI(''): for option_name in ('long', 'recursive', 'replication'): if getattr(args, option_name, False): raise CommandError( @@ -2641,6 +2636,7 @@ class Ls(B2IDOrB2URIMixin, BaseLs): - **listFiles** - **listBuckets** (if bucket name is not provided) """ + ALLOW_ALL_BUCKETS = True @@ -2775,12 +2771,13 @@ def _removal_done(self, future: Future) -> None: def _setup_parser(cls, parser): add_normalized_argument(parser, '--bypass-governance', action='store_true', default=False) add_normalized_argument(parser, '--dry-run', action='store_true') - add_normalized_argument(parser, + add_normalized_argument( + parser, '--queue-size', type=int, default=None, - help='max elements fetched at once for removal, ' \ - 'if left unset defaults to twice the number of threads.', + help='max elements fetched at once for removal, ' + 'if left unset defaults to twice the number of threads.', ) add_normalized_argument(parser, '--no-progress', action='store_true') add_normalized_argument(parser, '--fail-fast', action='store_true') @@ -2807,8 +2804,10 @@ def _run(self, args): event_type, *data = queue_entry if event_type == submit_thread.ERROR_TAG: file_version, error = data - message = f'Deletion of file "{file_version.file_name}" ' \ - f'({file_version.id_}) failed: {str(error)}' + message = ( + f'Deletion of file "{file_version.file_name}" ' + f'({file_version.id_}) failed: {str(error)}' + ) reporter.print_completion(message) failed_on_any_file = True @@ -3124,14 +3123,14 @@ def _setup_parser(cls, parser): '--exclude-if-modified-after', type=parse_millis_from_float_timestamp, default=None, - metavar='TIMESTAMP' + metavar='TIMESTAMP', ) add_normalized_argument( parser, '--exclude-if-uploaded-after', type=parse_millis_from_float_timestamp, default=None, - metavar='TIMESTAMP' + metavar='TIMESTAMP', ) super()._setup_parser(parser) # add parameters from the mixins, and the parent class parser.add_argument('source') @@ -3149,10 +3148,12 @@ def _run(self, args): policies_manager = self.get_policies_manager_from_args(args) if args.threads is not None: - if args.sync_threads != self.DEFAULT_SYNC_THREADS \ - or args.upload_threads != self.DEFAULT_UPLOAD_THREADS \ - or args.download_threads != self.DEFAULT_DOWNLOAD_THREADS: - raise ValueError("--threads cannot be used with other thread options") + if ( + args.sync_threads != self.DEFAULT_SYNC_THREADS + or args.upload_threads != self.DEFAULT_UPLOAD_THREADS + or args.download_threads != self.DEFAULT_DOWNLOAD_THREADS + ): + raise ValueError('--threads cannot be used with other thread options') sync_threads = upload_threads = download_threads = args.threads else: sync_threads = args.sync_threads @@ -3205,7 +3206,7 @@ def _run(self, args): dest_folder=destination, now_millis=current_time_millis(), reporter=reporter, - **kwargs + **kwargs, ) except EmptyDirectory as ex: raise CommandError( @@ -3329,8 +3330,7 @@ def _setup_parser(cls, parser): parser, '--cors-rules', type=validated_loads, - help= - "If given, the bucket will have a 'custom' CORS configuration. Accepts a JSON string." + help="If given, the bucket will have a 'custom' CORS configuration. 
Accepts a JSON string.", ) add_normalized_argument( parser, @@ -3354,8 +3354,7 @@ def _setup_parser(cls, parser): '--file-lock-enabled', action='store_true', default=None, - help= - "If given, the bucket will have the file lock mechanism enabled. This parameter cannot be changed back." + help='If given, the bucket will have the file lock mechanism enabled. This parameter cannot be changed back.', ) add_bucket_name_argument(parser) parser.add_argument('bucketType', nargs='?', choices=CREATE_BUCKET_TYPES) @@ -3406,7 +3405,7 @@ def _setup_parser(cls, parser): parser, '--min-part-size', type=int, - help="minimum part size in bytes", + help='minimum part size in bytes', default=None, ) super()._setup_parser(parser) # noqa @@ -3420,7 +3419,7 @@ class UploadFileMixin( DestinationSseMixin, LegalHoldMixin, FileRetentionSettingMixin, - metaclass=ABCMeta + metaclass=ABCMeta, ): """ Content type is optional. @@ -3437,27 +3436,26 @@ def _setup_parser(cls, parser): add_normalized_argument( parser, '--content-type', - help="MIME type of the file being uploaded. If not set it will be guessed." + help='MIME type of the file being uploaded. If not set it will be guessed.', ) parser.add_argument( - '--sha1', help="SHA-1 of the data being uploaded for verifying file integrity" + '--sha1', help='SHA-1 of the data being uploaded for verifying file integrity' ) parser.add_argument( '--info', action='append', default=[], - help= - "additional file info to be stored with the file. Can be used multiple times for different information." + help='additional file info to be stored with the file. Can be used multiple times for different information.', ) add_normalized_argument( parser, '--custom-upload-timestamp', type=int, - help="overrides object creation date. Expressed as a number of milliseconds since epoch." + help='overrides object creation date. Expressed as a number of milliseconds since epoch.', ) - add_bucket_name_argument(parser, help="name of the bucket where the file will be stored") - parser.add_argument('localFilePath', help="path of the local file or stream to be uploaded") - parser.add_argument('b2FileName', help="name file will be given when stored in B2") + add_bucket_name_argument(parser, help='name of the bucket where the file will be stored') + parser.add_argument('localFilePath', help='path of the local file or stream to be uploaded') + parser.add_argument('b2FileName', help='name file will be given when stored in B2') super()._setup_parser(parser) # add parameters from the mixins @@ -3465,9 +3463,9 @@ def _run(self, args): self._set_threads_from_args(args) upload_kwargs = self.get_execute_kwargs(args) file_info = self.execute_operation(**upload_kwargs) - bucket = upload_kwargs["bucket"] - self._print("URL by file name: " + bucket.get_download_url(file_info.file_name)) - self._print("URL by fileId: " + self.api.get_download_url_for_fileid(file_info.id_)) + bucket = upload_kwargs['bucket'] + self._print('URL by file name: ' + bucket.get_download_url(file_info.file_name)) + self._print('URL by fileId: ' + self.api.get_download_url_for_fileid(file_info.id_)) self._print_json(file_info) return 0 @@ -3480,7 +3478,7 @@ def get_execute_kwargs(self, args) -> dict: except OSError: if not points_to_fifo(pathlib.Path(args.localFilePath)): self._print_stderr( - "WARNING: Unable to determine file modification timestamp. " + 'WARNING: Unable to determine file modification timestamp. ' f"{SRC_LAST_MODIFIED_MILLIS!r} file info won't be set." 
) else: @@ -3489,32 +3487,21 @@ def get_execute_kwargs(self, args) -> dict: file_infos = self._file_info_with_header_args(args, file_infos) return { - "bucket": - self.api.get_bucket_by_name(args.bucketName), - "content_type": - args.content_type, - "custom_upload_timestamp": - args.custom_upload_timestamp, - "encryption": - self._get_destination_sse_setting(args), - "file_info": - file_infos, - "file_name": - args.b2FileName, - "file_retention": - self._get_file_retention_setting(args), - "legal_hold": - self._get_legal_hold_setting(args), - "local_file": - args.localFilePath, - "min_part_size": - args.min_part_size, - "progress_listener": - self.make_progress_listener(args.localFilePath, args.no_progress or args.quiet), - "sha1_sum": - args.sha1, - "threads": - self._get_threads_from_args(args), + 'bucket': self.api.get_bucket_by_name(args.bucketName), + 'content_type': args.content_type, + 'custom_upload_timestamp': args.custom_upload_timestamp, + 'encryption': self._get_destination_sse_setting(args), + 'file_info': file_infos, + 'file_name': args.b2FileName, + 'file_retention': self._get_file_retention_setting(args), + 'legal_hold': self._get_legal_hold_setting(args), + 'local_file': args.localFilePath, + 'min_part_size': args.min_part_size, + 'progress_listener': self.make_progress_listener( + args.localFilePath, args.no_progress or args.quiet + ), + 'sha1_sum': args.sha1, + 'threads': self._get_threads_from_args(args), } @abstractmethod @@ -3525,15 +3512,15 @@ def upload_file_kwargs_to_unbound_upload(self, **kwargs): """ Translate `file upload` kwargs to unbound_upload equivalents """ - kwargs["large_file_sha1"] = kwargs.pop("sha1_sum", None) - kwargs["buffers_count"] = kwargs["threads"] + 1 - kwargs["read_size"] = kwargs["min_part_size"] or DEFAULT_MIN_PART_SIZE + kwargs['large_file_sha1'] = kwargs.pop('sha1_sum', None) + kwargs['buffers_count'] = kwargs['threads'] + 1 + kwargs['read_size'] = kwargs['min_part_size'] or DEFAULT_MIN_PART_SIZE return kwargs def get_input_stream(self, filename: str) -> str | int | io.BinaryIO: """Get input stream IF filename points to a FIFO or stdin.""" - if filename == "-": - return sys.stdin.buffer if platform.system() == "Windows" else sys.stdin.fileno() + if filename == '-': + return sys.stdin.buffer if platform.system() == 'Windows' else sys.stdin.fileno() elif points_to_fifo(pathlib.Path(filename)): return filename @@ -3543,7 +3530,7 @@ def file_identifier_to_read_stream(self, file_id: str | int | BinaryIO, bufferin if isinstance(file_id, (str, int)): return open( file_id, - mode="rb", + mode='rb', closefd=not isinstance(file_id, int), buffering=buffering, ) @@ -3589,7 +3576,7 @@ class FileUploadBase(UploadFileMixin, UploadModeMixin, Command): def get_execute_kwargs(self, args) -> dict: kwargs = super().get_execute_kwargs(args) - kwargs["upload_mode"] = self._get_upload_mode_from_args(args) + kwargs['upload_mode'] = self._get_upload_mode_from_args(args) return kwargs def execute_operation(self, local_file, bucket, threads, **kwargs): @@ -3598,14 +3585,14 @@ def execute_operation(self, local_file, bucket, threads, **kwargs): except self.NotAnInputStream: # it is a regular file file_version = bucket.upload_local_file(local_file=local_file, **kwargs) else: - if kwargs.pop("upload_mode", None) != UploadMode.FULL: + if kwargs.pop('upload_mode', None) != UploadMode.FULL: self._print_stderr( - "WARNING: Ignoring upload mode setting as we are uploading a stream." + 'WARNING: Ignoring upload mode setting as we are uploading a stream.' 
) kwargs = self.upload_file_kwargs_to_unbound_upload(threads=threads, **kwargs) - del kwargs["threads"] + del kwargs['threads'] input_stream = self.file_identifier_to_read_stream( - input_stream, kwargs["min_part_size"] or DEFAULT_MIN_PART_SIZE + input_stream, kwargs['min_part_size'] or DEFAULT_MIN_PART_SIZE ) with input_stream: file_version = bucket.upload_unbound_stream(read_only_object=input_stream, **kwargs) @@ -3654,7 +3641,7 @@ def _setup_parser(cls, parser): '--part-size', type=int, default=None, - help=("part size in bytes. Must be in range of "), + help=('part size in bytes. Must be in range of '), ) add_normalized_argument( parser, @@ -3662,8 +3649,8 @@ def _setup_parser(cls, parser): type=float, default=3600.0, help=( - "maximum time in seconds that not a single part may sit in the queue," - " waiting to be uploaded, before an error is returned" + 'maximum time in seconds that not a single part may sit in the queue,' + ' waiting to be uploaded, before an error is returned' ), ) super()._setup_parser(parser) @@ -3671,8 +3658,8 @@ def _setup_parser(cls, parser): def get_execute_kwargs(self, args) -> dict: kwargs = super().get_execute_kwargs(args) kwargs = self.upload_file_kwargs_to_unbound_upload(**kwargs) - kwargs["recommended_upload_part_size"] = args.part_size - kwargs["unused_buffer_timeout_seconds"] = args.unused_buffer_timeout_seconds + kwargs['recommended_upload_part_size'] = args.part_size + kwargs['unused_buffer_timeout_seconds'] = args.unused_buffer_timeout_seconds return kwargs def execute_operation(self, local_file, bucket, threads, **kwargs): @@ -3680,14 +3667,14 @@ def execute_operation(self, local_file, bucket, threads, **kwargs): input_stream = self.get_input_stream(local_file) except self.NotAnInputStream: # it is a regular file self._print_stderr( - "WARNING: You are using a stream upload command to upload a regular file. " - "While it will work, it is inefficient. " - "Use of `file upload` command is recommended." + 'WARNING: You are using a stream upload command to upload a regular file. ' + 'While it will work, it is inefficient. ' + 'Use of `file upload` command is recommended.' 
) input_stream = local_file input_stream = self.file_identifier_to_read_stream( - input_stream, kwargs["min_part_size"] or DEFAULT_MIN_PART_SIZE + input_stream, kwargs['min_part_size'] or DEFAULT_MIN_PART_SIZE ) with input_stream: file_version = bucket.upload_unbound_stream(read_only_object=input_stream, **kwargs) @@ -3731,14 +3718,14 @@ def _setup_parser(cls, parser): parser, '--file-retention-mode', default=None, - choices=(RetentionMode.COMPLIANCE.value, RetentionMode.GOVERNANCE.value, 'none') + choices=(RetentionMode.COMPLIANCE.value, RetentionMode.GOVERNANCE.value, 'none'), ) add_normalized_argument( parser, '--retain-until', type=parse_millis_from_float_timestamp, metavar='TIMESTAMP', - default=None + default=None, ) add_normalized_argument(parser, '--bypass-governance', action='store_true', default=False) @@ -3823,14 +3810,14 @@ def _setup_parser(cls, parser): super()._setup_parser(parser) parser.add_argument( 'retentionMode', - choices=(RetentionMode.GOVERNANCE.value, RetentionMode.COMPLIANCE.value, 'none') + choices=(RetentionMode.GOVERNANCE.value, RetentionMode.COMPLIANCE.value, 'none'), ) add_normalized_argument( parser, '--retain-until', type=parse_millis_from_float_timestamp, metavar='TIMESTAMP', - default=None + default=None, ) add_normalized_argument(parser, '--bypass-governance', action='store_true', default=False) @@ -3876,8 +3863,7 @@ def _setup_parser(cls, parser): add_normalized_argument( parser, '--priority', - help= - 'priority for the new replication rule on the source side [%d-%d]. Will be set automatically when not specified.' + help='priority for the new replication rule on the source side [%d-%d]. Will be set automatically when not specified.' % ( ReplicationRule.MIN_PRIORITY, ReplicationRule.MAX_PRIORITY, @@ -3889,13 +3875,13 @@ def _setup_parser(cls, parser): parser, '--file-name-prefix', metavar='PREFIX', - help='only replicate files starting with PREFIX' + help='only replicate files starting with PREFIX', ) add_normalized_argument( parser, '--include-existing-files', action='store_true', - help='if given, also replicates files uploaded prior to creation of the replication rule' + help='if given, also replicates files uploaded prior to creation of the replication rule', ) def _run(self, args): @@ -3907,8 +3893,9 @@ def _run(self, args): helper = ReplicationSetupHelper() helper.setup_both( source_bucket=self.api.get_bucket_by_name(args.source).get_fresh_state(), - destination_bucket=destination_api.get_bucket_by_name(args.destination - ).get_fresh_state(), + destination_bucket=destination_api.get_bucket_by_name( + args.destination + ).get_fresh_state(), name=args.name, priority=args.priority, prefix=args.file_name_prefix, @@ -3937,7 +3924,7 @@ def _run(self, args): @classmethod def alter_rule_by_name(cls, bucket: Bucket, name: str) -> tuple[bool, bool]: - """ returns False if rule could not be found """ + """returns False if rule could not be found""" if not bucket.replication or not bucket.replication.rules: return False, False @@ -3974,7 +3961,7 @@ def alter_rule_by_name(cls, bucket: Bucket, name: str) -> tuple[bool, bool]: @classmethod @abstractmethod def alter_one_rule(cls, rule: ReplicationRule) -> ReplicationRule | None: - """ return None to delete a rule """ + """return None to delete a rule""" pass @@ -3990,7 +3977,7 @@ class ReplicationDeleteBase(ReplicationRuleChanger): @classmethod def alter_one_rule(cls, rule: ReplicationRule) -> ReplicationRule | None: - """ return None to delete rule """ + """return None to delete rule""" return None @@ -4006,7 
+3993,7 @@ class ReplicationPauseBase(ReplicationRuleChanger): @classmethod def alter_one_rule(cls, rule: ReplicationRule) -> ReplicationRule | None: - """ return None to delete rule """ + """return None to delete rule""" rule.is_enabled = False return rule @@ -4023,7 +4010,7 @@ class ReplicationUnpauseBase(ReplicationRuleChanger): @classmethod def alter_one_rule(cls, rule: ReplicationRule) -> ReplicationRule | None: - """ return None to delete rule """ + """return None to delete rule""" rule.is_enabled = True return rule @@ -4066,7 +4053,7 @@ def _setup_parser(cls, parser): '--columns', default=['all'], type=lambda value: re.split(r', ?', value), - metavar='COLUMN ONE,COLUMN TWO' + metavar='COLUMN ONE,COLUMN TWO', ) def _run(self, args): @@ -4104,7 +4091,8 @@ def _run(self, args): results = { rule_name: self.filter_results_columns( rule_results, - [column.replace(' ', '_') for column in args.columns + [ + column.replace(' ', '_') for column in args.columns ], # allow users to use spaces instead of underscores ) for rule_name, rule_results in results.items() @@ -4123,8 +4111,12 @@ def _run(self, args): @classmethod def get_results_for_rule( - cls, bucket: Bucket, rule: ReplicationRule, destination_api: B2Api | None, - scan_destination: bool, quiet: bool + cls, + bucket: Bucket, + rule: ReplicationRule, + destination_api: B2Api | None, + scan_destination: bool, + quiet: bool, ) -> list[dict]: monitor = ReplicationMonitor( bucket=bucket, @@ -4138,7 +4130,8 @@ def get_results_for_rule( { **dataclasses.asdict(result), 'count': count, - } for result, count in report.counter_by_status.items() + } + for result, count in report.counter_by_status.items() ] @classmethod @@ -4169,12 +4162,12 @@ def output_console(self, results: dict[str, list[dict]]) -> None: key.replace('_', '\n'): # split key to minimize column size self.to_human_readable(value) for key, value in result.items() - } for result in rule_results + } + for result in rule_results ] self._print(tabulate(rule_results, headers='keys', tablefmt='grid')) def output_csv(self, results: dict[str, list[dict]]) -> None: - rows = [] for rule_name, rule_results in results.items(): @@ -4186,7 +4179,8 @@ def output_csv(self, results: dict[str, list[dict]]) -> None: self.to_human_readable(value) for key, value in result.items() }, - } for result in rule_results + } + for result in rule_results ] if not rows: @@ -4223,6 +4217,7 @@ class License(Command): # pragma: no cover Displays the license of B2 Command line tool and all libraries shipped with it. 
""" + LICENSE_OUTPUT_FILE = pathlib.Path(__file__).parent.parent / 'licenses_output.txt' REQUIRES_AUTH = False @@ -4236,24 +4231,15 @@ class License(Command): # pragma: no cover MODULES_TO_OVERRIDE_LICENSE_TEXT = {'rst2ansi', 'b2sdk'} LICENSES = { - 'argcomplete': - 'https://raw.githubusercontent.com/kislyuk/argcomplete/develop/LICENSE.rst', - 'atomicwrites': - 'https://raw.githubusercontent.com/untitaker/python-atomicwrites/master/LICENSE', - 'platformdirs': - 'https://raw.githubusercontent.com/platformdirs/platformdirs/main/LICENSE.txt', - 'PTable': - 'https://raw.githubusercontent.com/jazzband/prettytable/main/LICENSE', - 'pipx': - 'https://raw.githubusercontent.com/pypa/pipx/main/LICENSE', - 'userpath': - 'https://raw.githubusercontent.com/ofek/userpath/master/LICENSE.txt', - 'future': - 'https://raw.githubusercontent.com/PythonCharmers/python-future/master/LICENSE.txt', - 'pefile': - 'https://raw.githubusercontent.com/erocarrera/pefile/master/LICENSE', - 'https://github.com/python/typeshed': - 'https://raw.githubusercontent.com/python/typeshed/main/LICENSE', + 'argcomplete': 'https://raw.githubusercontent.com/kislyuk/argcomplete/develop/LICENSE.rst', + 'atomicwrites': 'https://raw.githubusercontent.com/untitaker/python-atomicwrites/master/LICENSE', + 'platformdirs': 'https://raw.githubusercontent.com/platformdirs/platformdirs/main/LICENSE.txt', + 'PTable': 'https://raw.githubusercontent.com/jazzband/prettytable/main/LICENSE', + 'pipx': 'https://raw.githubusercontent.com/pypa/pipx/main/LICENSE', + 'userpath': 'https://raw.githubusercontent.com/ofek/userpath/master/LICENSE.txt', + 'future': 'https://raw.githubusercontent.com/PythonCharmers/python-future/master/LICENSE.txt', + 'pefile': 'https://raw.githubusercontent.com/erocarrera/pefile/master/LICENSE', + 'https://github.com/python/typeshed': 'https://raw.githubusercontent.com/python/typeshed/main/LICENSE', } class NormalizingStringIO(io.StringIO): @@ -4313,8 +4299,9 @@ def _put_license_text(self, stream: io.StringIO, with_packages: bool = False): files_table.add_row([file_name, file_content]) stream.write(str(files_table)) stream.write(f'\n\n{b2_call_name} license:\n') - b2_license_file_text = (pathlib.Path(__file__).parent.parent / - 'LICENSE').read_text(encoding='utf8') + b2_license_file_text = (pathlib.Path(__file__).parent.parent / 'LICENSE').read_text( + encoding='utf8' + ) stream.write(b2_license_file_text) def _put_license_text_for_packages(self, stream: io.StringIO): @@ -4357,13 +4344,13 @@ def _put_license_text_for_packages(self, stream: io.StringIO): def _get_licenses_dicts(cls) -> list[dict]: assert piplicenses, 'In order to run this command, you need to install the `license` extra: pip install b2[license]' pipdeptree_run = subprocess.run( - ["pipdeptree", "--json", "-p", "b2"], + ['pipdeptree', '--json', '-p', 'b2'], capture_output=True, text=True, check=True, ) pipdeptree = json.loads(pipdeptree_run.stdout) - used_packages = [dep["package"]['package_name'] for dep in pipdeptree] + used_packages = [dep['package']['package_name'] for dep in pipdeptree] parser = piplicenses.create_parser() args = parser.parse_args( @@ -4477,6 +4464,7 @@ class BucketNotificationRuleBase(BucketNotificationRuleWarningMixin, Command): {NAME} bucket notification-rule update b2://bucketName/newPath/ ruleName --disable --event-type "b2:ObjectCreated:*" --event-type "b2:ObjectHidden:*" {NAME} bucket notification-rule delete b2://bucketName ruleName """ + subcommands_registry = ClassRegistry(attr_name='COMMAND_NAME') @@ -4500,14 +4488,14 @@ class 
BucketNotificationRuleList(JSONOptionMixin, BucketNotificationRuleWarningM - **readBucketNotifications** """ + COMMAND_NAME = 'list' @classmethod def _setup_parser(cls, parser): add_b2_uri_argument( parser, - help= - "B2 URI of the bucket with optional path prefix, e.g. b2://bucketName or b2://bucketName/optionalSubPath/" + help='B2 URI of the bucket with optional path prefix, e.g. b2://bucketName or b2://bucketName/optionalSubPath/', ) super()._setup_parser(parser) @@ -4515,10 +4503,11 @@ def _run(self, args): bucket = self.api.get_bucket_by_name(args.B2_URI.bucket_name) rules = sorted( ( - rule for rule in bucket.get_notification_rules() - if rule["objectNamePrefix"].startswith(args.B2_URI.path) + rule + for rule in bucket.get_notification_rules() + if rule['objectNamePrefix'].startswith(args.B2_URI.path) ), - key=lambda rule: rule["name"] + key=lambda rule: rule['name'], ) if args.json: self._print_json(rules) @@ -4546,43 +4535,40 @@ def _validate_secret(cls, value: str) -> str: def setup_rule_fields_parser(cls, parser, creation: bool): add_b2_uri_argument( parser, - help= - "B2 URI of the bucket with optional path prefix, e.g. b2://bucketName or b2://bucketName/optionalSubPath/" + help='B2 URI of the bucket with optional path prefix, e.g. b2://bucketName or b2://bucketName/optionalSubPath/', ) - parser.add_argument('ruleName', help="Name of the rule") + parser.add_argument('ruleName', help='Name of the rule') parser.add_argument( '--event-type', action='append', - help= - "Events scope, e.g., 'b2:ObjectCreated:*'. Can be used multiple times to set multiple scopes.", - required=creation + help="Events scope, e.g., 'b2:ObjectCreated:*'. Can be used multiple times to set multiple scopes.", + required=creation, ) parser.add_argument( - '--webhook-url', help="URL to send the notification to", required=creation + '--webhook-url', help='URL to send the notification to', required=creation ) parser.add_argument( '--sign-secret', - help="optional signature key consisting of 32 alphanumeric characters ", + help='optional signature key consisting of 32 alphanumeric characters ', type=cls._validate_secret, default=None, ) parser.add_argument( '--custom-header', action='append', - help= - "Custom header to be sent with the notification. Can be used multiple times to set multiple headers. Format: HEADER_NAME=VALUE" + help='Custom header to be sent with the notification. Can be used multiple times to set multiple headers. 
Format: HEADER_NAME=VALUE', ) parser.add_argument( '--enable', action='store_true', - help="Flag to enable the notification rule", - default=None + help='Flag to enable the notification rule', + default=None, ) parser.add_argument( '--disable', action='store_false', - help="Flag to disable the notification rule", - dest='enable' + help='Flag to disable the notification rule', + dest='enable', ) def get_rule_from_args(self, args): @@ -4601,12 +4587,11 @@ def get_rule_from_args(self, args): 'eventTypes': args.event_type, 'isEnabled': args.enable, 'objectNamePrefix': args.B2_URI.path, - 'targetConfiguration': - { - 'url': args.webhook_url, - 'customHeaders': custom_headers, - 'hmacSha256SigningSecret': args.sign_secret, - }, + 'targetConfiguration': { + 'url': args.webhook_url, + 'customHeaders': custom_headers, + 'hmacSha256SigningSecret': args.sign_secret, + }, } return filter_out_empty_values(rule) @@ -4620,7 +4605,7 @@ def print_rule(self, args, rule): class BucketNotificationRuleUpdateBase(BucketNotificationRuleCreateBase): def _run(self, args): bucket = self.api.get_bucket_by_name(args.B2_URI.bucket_name) - rules_by_name = {rule["name"]: rule for rule in bucket.get_notification_rules()} + rules_by_name = {rule['name']: rule for rule in bucket.get_notification_rules()} rule = rules_by_name.get(args.ruleName) if not rule: raise CommandError( @@ -4636,7 +4621,7 @@ def _run(self, args): rules = bucket.set_notification_rules( [notification_rule_response_to_request(rule) for rule in rules_by_name.values()] ) - rule = next(rule for rule in rules if rule["name"] == args.ruleName) + rule = next(rule for rule in rules if rule['name'] == args.ruleName) self.print_rule(args=args, rule=rule) return 0 @@ -4660,6 +4645,7 @@ class BucketNotificationRuleCreate(BucketNotificationRuleCreateBase): - **readBucketNotifications** - **writeBucketNotifications** """ + COMMAND_NAME = 'create' NEW_RULE_DEFAULTS = { @@ -4677,7 +4663,7 @@ def _setup_parser(cls, parser): def _run(self, args): bucket = self.api.get_bucket_by_name(args.B2_URI.bucket_name) - rules_by_name = {rule["name"]: rule for rule in bucket.get_notification_rules()} + rules_by_name = {rule['name']: rule for rule in bucket.get_notification_rules()} if args.ruleName in rules_by_name: raise CommandError( f'rule with name {args.ruleName!r} already exists on bucket {bucket.name!r}' @@ -4692,10 +4678,10 @@ def _run(self, args): rules = bucket.set_notification_rules( [ notification_rule_response_to_request(rule) - for rule in sorted(rules_by_name.values(), key=lambda r: r["name"]) + for rule in sorted(rules_by_name.values(), key=lambda r: r['name']) ] ) - rule = next(rule for rule in rules if rule["name"] == args.ruleName) + rule = next(rule for rule in rules if rule['name'] == args.ruleName) self.print_rule(args=args, rule=rule) return 0 @@ -4754,13 +4740,13 @@ class BucketNotificationRuleEnable(BucketNotificationRuleUpdateBase): @classmethod def _setup_parser(cls, parser): add_b2_uri_argument( - parser, help="B2 URI of the bucket to enable the rule for, e.g. b2://bucketName" + parser, help='B2 URI of the bucket to enable the rule for, e.g. 
b2://bucketName' ) - parser.add_argument('ruleName', help="Name of the rule to enable") + parser.add_argument('ruleName', help='Name of the rule to enable') super()._setup_parser(parser) def get_rule_from_args(self, args): - logger.warning("WARNING: ignoring path from %r", args.B2_URI) + logger.warning('WARNING: ignoring path from %r', args.B2_URI) return {'name': args.ruleName, 'isEnabled': True} @@ -4789,13 +4775,13 @@ class BucketNotificationRuleDisable(BucketNotificationRuleUpdateBase): @classmethod def _setup_parser(cls, parser): add_b2_uri_argument( - parser, help="B2 URI of the bucket to enable the rule for, e.g. b2://bucketName" + parser, help='B2 URI of the bucket to enable the rule for, e.g. b2://bucketName' ) - parser.add_argument('ruleName', help="Name of the rule to enable") + parser.add_argument('ruleName', help='Name of the rule to enable') super()._setup_parser(parser) def get_rule_from_args(self, args): - logger.warning("WARNING: ignoring path from %r", args.B2_URI) + logger.warning('WARNING: ignoring path from %r', args.B2_URI) return {'name': args.ruleName, 'isEnabled': False} @@ -4815,14 +4801,14 @@ class BucketNotificationRuleDelete(Command): @classmethod def _setup_parser(cls, parser): add_b2_uri_argument( - parser, help="B2 URI of the bucket to delete the rule from, e.g. b2://bucketName" + parser, help='B2 URI of the bucket to delete the rule from, e.g. b2://bucketName' ) - parser.add_argument('ruleName', help="Name of the rule to delete") + parser.add_argument('ruleName', help='Name of the rule to delete') super()._setup_parser(parser) def _run(self, args): bucket = self.api.get_bucket_by_name(args.B2_URI.bucket_name) - rules_by_name = {rule["name"]: rule for rule in bucket.get_notification_rules()} + rules_by_name = {rule['name']: rule for rule in bucket.get_notification_rules()} try: del rules_by_name[args.ruleName] @@ -4852,6 +4838,7 @@ class Key(Command): {NAME} key create my-key listFiles,deleteFiles {NAME} key delete 005c398ac3212400000000010 """ + subcommands_registry = ClassRegistry(attr_name='COMMAND_NAME') @@ -4904,6 +4891,7 @@ class Replication(Command): {NAME} replication unpause src-bucket my-repl-rule {NAME} replication delete src-bucket my-repl-rule """ + subcommands_registry = ClassRegistry(attr_name='COMMAND_NAME') @@ -4976,6 +4964,7 @@ class Account(Command): {NAME} account get {NAME} account clear """ + subcommands_registry = ClassRegistry(attr_name='COMMAND_NAME') @@ -5029,8 +5018,9 @@ class BucketCmd(Command): {NAME} bucket delete {NAME} bucket get-download-auth """ + # to avoid conflicts with the Bucket class this class is named BucketCmd - COMMAND_NAME = "bucket" + COMMAND_NAME = 'bucket' subcommands_registry = ClassRegistry(attr_name='COMMAND_NAME') @@ -5131,6 +5121,7 @@ class File(Command): {NAME} file upload yourBucket localFile.txt file.txt {NAME} file url b2://yourBucket/file.txt """ + subcommands_registry = ClassRegistry(attr_name='COMMAND_NAME') @@ -5171,8 +5162,8 @@ class FileServerSideCopy(FileServerSideCopyBase): @classmethod def _setup_parser(cls, parser): - add_b2id_or_file_like_b2_uri_argument(parser, "sourceB2Uri") - add_b2id_or_file_like_b2_uri_argument(parser, "destinationB2Uri", by_id=False) + add_b2id_or_file_like_b2_uri_argument(parser, 'sourceB2Uri') + add_b2id_or_file_like_b2_uri_argument(parser, 'destinationB2Uri', by_id=False) super()._setup_parser(parser) def get_source_b2_uri(self, args) -> B2URIBase: @@ -5310,6 +5301,7 @@ class FileLarge(Command): {NAME} file large unfinished cancel b2://yourBucket {NAME} file large 
unfinished cancel b2id://yourFileId """ + COMMAND_NAME = 'large' subcommands_registry = ClassRegistry(attr_name='COMMAND_NAME') @@ -5335,6 +5327,7 @@ class FileLargeUnfinished(Command): {NAME} file large unfinished cancel b2://yourBucket {NAME} file large unfinished cancel b2id://yourFileId """ + COMMAND_NAME = 'unfinished' subcommands_registry = ClassRegistry(attr_name='COMMAND_NAME') @@ -5376,6 +5369,7 @@ class CancelAllUnfinishedLargeFiles( - **listFiles** - **writeFiles** """ + replaced_by_cmd = (File, FileLarge, FileLargeUnfinished, FileLargeUnfinishedCancel) @@ -5390,6 +5384,7 @@ class CancelLargeFile(CmdReplacedByMixin, B2URIFileIDArgMixin, FileLargeUnfinish - **writeFiles** """ + replaced_by_cmd = (File, FileLarge, FileLargeUnfinished, FileLargeUnfinishedCancel) @@ -5414,7 +5409,7 @@ def _get_default_escape_cc_setting(self): return int(escape_cc_env_var) == 1 else: logger.warning( - "WARNING: invalid value for {B2_ESCAPE_CONTROL_CHARACTERS} environment variable, available options are 0 or 1 - will assume variable is not set" + 'WARNING: invalid value for {B2_ESCAPE_CONTROL_CHARACTERS} environment variable, available options are 0 or 1 - will assume variable is not set' ) return self.stdout.isatty() @@ -5462,7 +5457,7 @@ def run_command(self, argv): except MissingAccountData as e: logger.exception('ConsoleTool missing account data error') self._print_stderr( - f'ERROR: {e} Use: \'{self.b2_binary_name} account authorize\' or provide auth data with ' + f"ERROR: {e} Use: '{self.b2_binary_name} account authorize' or provide auth data with " f'{B2_APPLICATION_KEY_ID_ENV_VAR!r} and {B2_APPLICATION_KEY_ENV_VAR!r} environment variables' ) return 1 @@ -5504,7 +5499,6 @@ def _initialize_b2_api(cls, args: argparse.Namespace, kwargs: dict) -> B2Api: return b2_api or _get_b2api_for_profile(profile=args.profile, **kwargs) def authorize_from_env(self) -> int: - key_id, key = get_keyid_and_key_from_env_vars() if key_id is None and key is None: @@ -5566,11 +5560,12 @@ def _setup_logging(cls, args, argv): logger.info(r'// %s %s %s \\', SEPARATOR, VERSION.center(8), SEPARATOR) logger.debug('platform is %s', platform.platform()) - if os.environ.get(B2_CLI_DOCKER_ENV_VAR) == "1": + if os.environ.get(B2_CLI_DOCKER_ENV_VAR) == '1': logger.debug('running as a Docker container') logger.debug( - 'Python version is %s %s', platform.python_implementation(), - sys.version.replace('\n', ' ') + 'Python version is %s %s', + platform.python_implementation(), + sys.version.replace('\n', ' '), ) logger.debug('b2sdk version is %s', b2sdk_version) logger.debug('locale is %s', locale.getlocale()) diff --git a/doc/source/conf.py b/doc/source/conf.py index d313eec0a..66e068091 100644 --- a/doc/source/conf.py +++ b/doc/source/conf.py @@ -58,8 +58,12 @@ # extensions coming with Sphinx (named 'sphinx.ext.*') or your custom # ones. extensions = [ - 'sphinx.ext.autodoc', 'sphinx.ext.intersphinx', 'sphinx.ext.ifconfig', 'sphinx.ext.viewcode', - 'sphinx.ext.coverage', 'sphinxarg.ext' + 'sphinx.ext.autodoc', + 'sphinx.ext.intersphinx', + 'sphinx.ext.ifconfig', + 'sphinx.ext.viewcode', + 'sphinx.ext.coverage', + 'sphinxarg.ext', ] # Add any paths that contain templates here, relative to this directory. @@ -77,7 +81,7 @@ # General information about the project. project = 'B2_Command_Line_Tool' -year = datetime.date.today().strftime("%Y") +year = datetime.date.today().strftime('%Y') author = 'Backblaze' copyright = f'{year}, {author}' @@ -95,7 +99,7 @@ # # This is also used if you do content translation via gettext catalogs. 
# Usually you set "language" from the command line for these cases. -language = "en" +language = 'en' # List of patterns, relative to source directory, that match files and # directories to ignore when looking for source files. @@ -129,12 +133,12 @@ 'exclude-members': '__weakref__, _abc_cache, _abc_negative_cache, _abc_negative_cache_version, _abc_registry, _abc_impl', 'members': True, 'undoc-members': True, -} # yapf: disable +} # Add any paths that contain custom static files (such as style sheets) here, # relative to this directory. They are copied after the builtin static files, # so a file named "default.css" will overwrite the builtin "default.css". -#html_static_path = ['_static'] +# html_static_path = ['_static'] # Custom sidebar templates, must be a dictionary that maps document names # to template names. @@ -142,14 +146,13 @@ # This is required for the alabaster theme # refs: http://alabaster.readthedocs.io/en/latest/installation.html#sidebars html_sidebars = { - '**': - [ - 'about.html', - 'navigation.html', - 'relations.html', # needs 'show_related': True theme option to display - 'searchbox.html', - 'donate.html', - ] + '**': [ + 'about.html', + 'navigation.html', + 'relations.html', # needs 'show_related': True theme option to display + 'searchbox.html', + 'donate.html', + ] } # -- Options for HTMLHelp output ------------------------------------------ @@ -163,15 +166,12 @@ # The paper size ('letterpaper' or 'a4paper'). # # 'papersize': 'letterpaper', - # The font size ('10pt', '11pt' or '12pt'). # # 'pointsize': '10pt', - # Additional stuff for the LaTeX preamble. # # 'preamble': '', - # Latex figure (float) alignment # # 'figure_align': 'htbp', @@ -182,8 +182,11 @@ # author, documentclass [howto, manual, or own class]). latex_documents = [ ( - master_doc, 'B2_Command_Line_Tool.tex', 'B2\\_Command\\_Line\\_Tool Documentation', - 'Backblaze', 'manual' + master_doc, + 'B2_Command_Line_Tool.tex', + 'B2\\_Command\\_Line\\_Tool Documentation', + 'Backblaze', + 'manual', ), ] @@ -202,8 +205,13 @@ # dir menu entry, description, category) texinfo_documents = [ ( - master_doc, 'B2_Command_Line_Tool', 'B2_Command_Line_Tool Documentation', author, - 'B2_Command_Line_Tool', 'One line description of project.', 'Miscellaneous' + master_doc, + 'B2_Command_Line_Tool', + 'B2_Command_Line_Tool Documentation', + author, + 'B2_Command_Line_Tool', + 'One line description of project.', + 'Miscellaneous', ), ] @@ -249,7 +257,7 @@ def regenerate_subcommands_help(): all_commands: list[tuple[tuple[str, ...], type]] = [] def _add_cmd(path, cmd_cls): - if getattr(cmd_cls, "deprecated", False): + if getattr(cmd_cls, 'deprecated', False): return registry = cmd_cls.subcommands_registry @@ -262,18 +270,18 @@ def _add_cmd(path, cmd_cls): _add_cmd((), B2) - subcommands_dir_target = pathlib.Path(__file__).parent / "subcommands" + subcommands_dir_target = pathlib.Path(__file__).parent / 'subcommands' with tempfile.TemporaryDirectory() as temp_dir: - subcommands_dir = pathlib.Path(temp_dir) / "subcommands" + subcommands_dir = pathlib.Path(temp_dir) / 'subcommands' subcommands_dir.mkdir() for command_path, cmd_cls in sorted(all_commands): - full_command = " ".join(command_path) - slug = full_command.replace(" ", "_") - (subcommands_dir / f"{slug}.rst").write_text( + full_command = ' '.join(command_path) + slug = full_command.replace(' ', '_') + (subcommands_dir / f'{slug}.rst').write_text( tmpl.format( HUMAN_NAME=full_command, COMMAND=full_command, - slug=f"subcommand_{slug}", + slug=f'subcommand_{slug}', ) ) 
diff --git a/noxfile.py b/noxfile.py index 6270dcee2..0fa24ad6a 100644 --- a/noxfile.py +++ b/noxfile.py @@ -21,8 +21,8 @@ import nox # Required for PDM to use nox's virtualenvs -os.environ["PDM_IGNORE_SAVED_PYTHON"] = "1" -os.environ["PDM_NO_LOCK"] = "1" +os.environ['PDM_IGNORE_SAVED_PYTHON'] = '1' +os.environ['PDM_NO_LOCK'] = '1' UPSTREAM_REPO_URL = 'git@github.com:Backblaze/B2_Command_Line_Tool.git' @@ -32,30 +32,34 @@ NO_STATICX = os.environ.get('NO_STATICX') is not None NOX_PYTHONS = os.environ.get('NOX_PYTHONS') -PYTHON_VERSIONS = [ - 'pypy3.9', - 'pypy3.10', - '3.8', - '3.9', - '3.10', - '3.11', - '3.12', - '3.13', -] if NOX_PYTHONS is None else NOX_PYTHONS.split(',') +PYTHON_VERSIONS = ( + [ + 'pypy3.9', + 'pypy3.10', + '3.8', + '3.9', + '3.10', + '3.11', + '3.12', + '3.13', + ] + if NOX_PYTHONS is None + else NOX_PYTHONS.split(',') +) def _detect_python_nox_id() -> str: major, minor, *_ = platform.python_version_tuple() - python_nox_id = f"{major}.{minor}" + python_nox_id = f'{major}.{minor}' if platform.python_implementation() == 'PyPy': - python_nox_id = f"pypy{python_nox_id}" + python_nox_id = f'pypy{python_nox_id}' return python_nox_id if CI and not NOX_PYTHONS: # this is done to allow it to work even if `nox -p` was passed to nox PYTHON_VERSIONS = [_detect_python_nox_id()] - print(f"CI job mode; using provided interpreter only; PYTHON_VERSIONS={PYTHON_VERSIONS!r}") + print(f'CI job mode; using provided interpreter only; PYTHON_VERSIONS={PYTHON_VERSIONS!r}') PYTHON_DEFAULT_VERSION = PYTHON_VERSIONS[-2] if len(PYTHON_VERSIONS) > 1 else PYTHON_VERSIONS[0] @@ -76,7 +80,7 @@ def _detect_python_nox_id() -> str: PYTEST_GLOBAL_ARGS = [] if CI: - PYTEST_GLOBAL_ARGS.append("-vv") + PYTEST_GLOBAL_ARGS.append('-vv') def pdm_install( @@ -97,10 +101,10 @@ def pdm_install( def github_output(name, value, *, secret=False): gh_output_path = os.environ.get('GITHUB_OUTPUT') if secret: - print(f"::add-mask::{value}") + print(f'::add-mask::{value}') if gh_output_path: - with open(gh_output_path, "a") as file: - file.write(f"{name}={value}\n") + with open(gh_output_path, 'a') as file: + file.write(f'{name}={value}\n') else: print(f"github_output {name}={'******' if secret else value}") @@ -126,10 +130,12 @@ def get_versions() -> list[str]: # - the first element is the latest unstable version (starts with an underscore) # - the last element is the latest stable version (highest version number) return [ - path.name for path in sorted( + path.name + for path in sorted( (pathlib.Path(__file__).parent / 'b2' / '_internal').glob('*b2v*'), key=get_version_key, - ) if (path / '__init__.py').exists() + ) + if (path / '__init__.py').exists() ] @@ -256,8 +262,12 @@ def cleanup_buckets(session): """Remove buckets from previous test runs.""" pdm_install(session, 'test') session.run( - 'pytest', '-s', '-x', *PYTEST_GLOBAL_ARGS, *session.posargs, - 'test/integration/cleanup_buckets.py' + 'pytest', + '-s', + '-x', + *PYTEST_GLOBAL_ARGS, + *session.posargs, + 'test/integration/cleanup_buckets.py', ) @@ -309,18 +319,25 @@ def bundle(session: nox.Session): # It is assumed that the last element will be the "latest stable". 
for binary_name, version in [('b2', versions[-1])] + list(zip(versions, versions)):
-        spec = template_spec.safe_substitute({
-            'VERSION': version,
-            'NAME': binary_name,
-        })
+        spec = template_spec.safe_substitute(
+            {
+                'VERSION': version,
+                'NAME': binary_name,
+            }
+        )
         pathlib.Path(f'{binary_name}.spec').write_text(spec)

         session.run('pyinstaller', *session.posargs, f'{binary_name}.spec')

         if SYSTEM == 'linux' and not NO_STATICX:
             session.run(
-                'staticx', '--no-compress', '--strip', '--loglevel', 'INFO', f'dist/{binary_name}',
-                f'dist/{binary_name}-static'
+                'staticx',
+                '--no-compress',
+                '--strip',
+                '--loglevel',
+                'INFO',
+                f'dist/{binary_name}',
+                f'dist/{binary_name}-static',
             )
             session.run(
                 'mv',
@@ -474,8 +491,16 @@ def doc(session):
         # session.notify('doc_cover')  # disabled due to https://github.com/sphinx-doc/sphinx/issues/11678
     else:
         sphinx_args[-2:-2] = [
-            '-E', '--open-browser', '--watch', '../b2', '--ignore', '*.pyc', '--ignore', '*~',
-            '--ignore', 'source/subcommands/*'
+            '-E',
+            '--open-browser',
+            '--watch',
+            '../b2',
+            '--ignore',
+            '*.pyc',
+            '--ignore',
+            '*~',
+            '--ignore',
+            'source/subcommands/*',
         ]
         session.run('sphinx-autobuild', *sphinx_args)
@@ -555,7 +580,7 @@ def generate_dockerfile(session):
     dist_path = 'dist'
     full_name, description = _read_readme_name_and_description()
-    vcs_ref = session.run("git", "rev-parse", "HEAD", external=True, silent=True).strip()
+    vcs_ref = session.run('git', 'rev-parse', 'HEAD', external=True, silent=True).strip()
     built_distribution = list(pathlib.Path('.').glob(f'{dist_path}/*'))[0]
     template_mapping = dict(
@@ -583,23 +608,25 @@ def run_docker_tests(session, image_tag):
     """Run unittests against a docker image."""
     user_id = session.run('id', '-u', silent=True, external=True).strip()
     group_id = session.run('id', '-g', silent=True, external=True).strip()
-    docker_run_cmd = f"docker run -i --user {user_id}:{group_id} -v /tmp:/tmp:rw --env-file ENVFILE"
+    docker_run_cmd = f'docker run -i --user {user_id}:{group_id} -v /tmp:/tmp:rw --env-file ENVFILE'
     run_integration_test(
-        session, [
-            "--sut",
-            f"{docker_run_cmd} {image_tag}",
-            "--env-file-cmd-placeholder",
-            "ENVFILE",
-        ]
+        session,
+        [
+            '--sut',
+            f'{docker_run_cmd} {image_tag}',
+            '--env-file-cmd-placeholder',
+            'ENVFILE',
+        ],
     )
     for binary_name in get_versions():
         run_integration_test(
-            session, [
-                "--sut",
-                f"{docker_run_cmd} {image_tag} {binary_name}",
-                "--env-file-cmd-placeholder",
-                "ENVFILE",
-            ]
+            session,
+            [
+                '--sut',
+                f'{docker_run_cmd} {image_tag} {binary_name}',
+                '--env-file-cmd-placeholder',
+                'ENVFILE',
+            ],
         )
@@ -667,12 +694,14 @@ def make_release_commit(session):
     )


-def load_allowed_change_types(project_toml: pathlib.Path = pathlib.Path('./pyproject.toml')
-                              ) -> set[str]:
+def load_allowed_change_types(
+    project_toml: pathlib.Path = pathlib.Path('./pyproject.toml'),
+) -> set[str]:
     """
     Load the list of allowed change types from the pyproject.toml file.
     """
     import tomllib
+
     configuration = tomllib.loads(project_toml.read_text())
     return set(entry['directory'] for entry in configuration['tool']['towncrier']['type'])

@@ -689,7 +718,7 @@ def is_changelog_filename_valid(filename: str, allowed_change_types: set[str]) -
         description, change_type, extension = filename.rsplit('.', maxsplit=2)
     except ValueError:
         # Not enough values to unpack.
-        return False, "Doesn't follow the \"<description>.<change_type>.md\" pattern."
+        return False, 'Doesn\'t follow the "<description>.<change_type>.md" pattern.'

     # Check whether the filename ends with .md.
if extension != wanted_extension: diff --git a/pyproject.toml b/pyproject.toml index d7cc7ff64..7da7b3025 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -118,6 +118,7 @@ ignore = [ "D100", "D105", "D107", "D200", "D202", "D203", "D205", "D212", "D400", "D401", "D415", "D101", "D102", "D103", "D104", # TODO remove once we have docstring for all public methods "E501", # TODO: remove E501 once docstrings are formatted + "UP031", ] line-length = 100 target-version = "py38" diff --git a/test/conftest.py b/test/conftest.py index b2c10da7f..080e01d30 100644 --- a/test/conftest.py +++ b/test/conftest.py @@ -18,15 +18,15 @@ @pytest.hookimpl def pytest_configure(config): config.addinivalue_line( - "markers", - "apiver(from_ver, to_ver): run tests only on certain apiver versions", + 'markers', + 'apiver(from_ver, to_ver): run tests only on certain apiver versions', ) @pytest.fixture(scope='session') def apiver(request): """Get apiver as a v-prefixed string, e.g. "v2".""" - return removeprefix(request.config.getoption('--cli', '').lstrip('_'), "b2") or None + return removeprefix(request.config.getoption('--cli', '').lstrip('_'), 'b2') or None @pytest.fixture(scope='session') diff --git a/test/integration/conftest.py b/test/integration/conftest.py index bd3b941f4..3b6684b66 100755 --- a/test/integration/conftest.py +++ b/test/integration/conftest.py @@ -46,7 +46,7 @@ @pytest.fixture(scope='session', autouse=True) def summary_notes(request, worker_id): - capmanager = request.config.pluginmanager.getplugin("capturemanager") + capmanager = request.config.pluginmanager.getplugin('capturemanager') with capmanager.global_and_fixture_disabled(): log_handler = logging.StreamHandler(sys.stderr) log_fmt = logging.Formatter(f'{worker_id} %(asctime)s %(levelname).1s %(message)s') @@ -62,7 +62,7 @@ def append(self, note): @pytest.fixture(scope='session', autouse=True) def node_stats(summary_notes): - summary_notes.append(f"NODE={NODE_DESCRIPTION} seed={RNG_SEED}") + summary_notes.append(f'NODE={NODE_DESCRIPTION} seed={RNG_SEED}') @pytest.hookimpl @@ -79,7 +79,7 @@ def pytest_addoption(parser): 'If specified, all occurrences of this string in `--sut` will be substituted with a ' 'path to a tmp file containing env vars to be used when running commands in tests. Useful ' 'for docker.' 
- ) + ), ) parser.addoption( '--as_version', @@ -116,7 +116,7 @@ def apiver_int(request): @pytest.fixture(scope='session') def apiver(apiver_int): - return f"v{apiver_int}" + return f'v{apiver_int}' @pytest.hookimpl @@ -258,14 +258,20 @@ def b2_api( yield api api.clean_buckets() # showing account_id in the logs is safe; so we explicitly prevent it from being redacted - summary_notes.append(f"B2 Account ID: {api.account_id[:1]!r}{api.account_id[1:]!r}") - summary_notes.append(f"Buckets names used during this tests: {api.bucket_name_log!r}") + summary_notes.append(f'B2 Account ID: {api.account_id[:1]!r}{api.account_id[1:]!r}') + summary_notes.append(f'Buckets names used during this tests: {api.bucket_name_log!r}') @pytest.fixture(scope='module') def global_b2_tool( - request, application_key_id, application_key, realm, this_run_bucket_name_prefix, b2_api, - auto_change_account_info_dir, b2_uri_args + request, + application_key_id, + application_key, + realm, + this_run_bucket_name_prefix, + b2_api, + auto_change_account_info_dir, + b2_uri_args, ) -> CommandLine: tool = CommandLine( request.config.getoption('--sut'), @@ -335,12 +341,12 @@ def is_running_on_docker(pytestconfig): SECRET_FIXTURES = {'application_key', 'application_key_id'} -@pytest.fixture(scope="session") +@pytest.fixture(scope='session') def homedir(tmp_path_factory): - yield tmp_path_factory.mktemp("test_homedir") + yield tmp_path_factory.mktemp('test_homedir') -@pytest.fixture(scope="session") +@pytest.fixture(scope='session') def b2_in_path(tmp_path_factory): """ Create a dummy b2 executable in a temporary directory and add it to PATH. @@ -348,31 +354,31 @@ def b2_in_path(tmp_path_factory): This allows us to test the b2 command from shell level even if tested `b2` package was not installed. 
""" - tempdir = tmp_path_factory.mktemp("temp_bin") - temp_executable = tempdir / "b2" - with open(temp_executable, "w") as f: + tempdir = tmp_path_factory.mktemp('temp_bin') + temp_executable = tempdir / 'b2' + with open(temp_executable, 'w') as f: f.write( - f"#!{sys.executable}\n" - "import sys\n" - f"sys.path.insert(0, {os.getcwd()!r})\n" # ensure relative imports work even if command is run in different directory - "from b2.console_tool import main\n" - "main()\n" + f'#!{sys.executable}\n' + 'import sys\n' + f'sys.path.insert(0, {os.getcwd()!r})\n' # ensure relative imports work even if command is run in different directory + 'from b2.console_tool import main\n' + 'main()\n' ) temp_executable.chmod(0o700) - original_path = os.environ["PATH"] - new_path = f"{tempdir}:{original_path}" + original_path = os.environ['PATH'] + new_path = f'{tempdir}:{original_path}' yield new_path -@pytest.fixture(scope="module") +@pytest.fixture(scope='module') def env(b2_in_path, homedir, monkeysession, is_running_on_docker): """Get ENV for running b2 command from shell level.""" if not is_running_on_docker: monkeysession.setenv('PATH', b2_in_path) monkeysession.setenv('HOME', str(homedir)) - monkeysession.setenv('SHELL', "/bin/bash") # fix for running under github actions + monkeysession.setenv('SHELL', '/bin/bash') # fix for running under github actions yield os.environ @@ -383,14 +389,14 @@ def bash_runner(env): def run_command(command: str): try: return subprocess.run( - ["/bin/bash", "-c", command], + ['/bin/bash', '-c', command], capture_output=True, check=True, env=env, text=True, ) except subprocess.CalledProcessError as e: - print(f"Command {command!r} failed with exit code {e.returncode}") + print(f'Command {command!r} failed with exit code {e.returncode}') print(e.stdout) print(e.stderr, file=sys.stderr) raise @@ -418,7 +424,7 @@ def b2_uri_args(apiver_int): # -- Persistent bucket fixtures -- @pytest.fixture def unique_subfolder(): - subfolder = f"test-{uuid.uuid4().hex[:8]}" + subfolder = f'test-{uuid.uuid4().hex[:8]}' yield subfolder diff --git a/test/integration/helpers.py b/test/integration/helpers.py index b128254b2..4f057f9a7 100755 --- a/test/integration/helpers.py +++ b/test/integration/helpers.py @@ -68,7 +68,7 @@ BUCKET_NAME_LENGTH = BUCKET_NAME_LENGTH_RANGE[1] BUCKET_CREATED_AT_MILLIS = 'created_at_millis' -NODE_DESCRIPTION = f"{platform.node()}: {platform.platform()}" +NODE_DESCRIPTION = f'{platform.node()}: {platform.platform()}' def get_seed(): @@ -83,8 +83,9 @@ def get_seed(): str(time.time_ns()).encode(), NODE_DESCRIPTION.encode(), str(os.getpid()).encode(), # needed due to pytest-xdist - str(environ).encode('utf8', errors='ignore' - ), # especially helpful under GitHub (and similar) CI + str(environ).encode( + 'utf8', errors='ignore' + ), # especially helpful under GitHub (and similar) CI ) ) @@ -96,7 +97,9 @@ def get_seed(): if sys.version_info < (3, 9): RNG.randbytes = lambda n: RNG.getrandbits(n * 8).to_bytes(n, 'little') -SSE_NONE = EncryptionSetting(mode=EncryptionMode.NONE,) +SSE_NONE = EncryptionSetting( + mode=EncryptionMode.NONE, +) SSE_B2_AES = EncryptionSetting( mode=EncryptionMode.SSE_B2, algorithm=EncryptionAlgorithm.AES256, @@ -105,12 +108,12 @@ def get_seed(): SSE_C_AES = EncryptionSetting( mode=EncryptionMode.SSE_C, algorithm=EncryptionAlgorithm.AES256, - key=EncryptionKey(secret=_SSE_KEY, key_id='user-generated-key-id') + key=EncryptionKey(secret=_SSE_KEY, key_id='user-generated-key-id'), ) SSE_C_AES_2 = EncryptionSetting( mode=EncryptionMode.SSE_C, 
algorithm=EncryptionAlgorithm.AES256, - key=EncryptionKey(secret=_SSE_KEY, key_id='another-user-generated-key-id') + key=EncryptionKey(secret=_SSE_KEY, key_id='another-user-generated-key-id'), ) @@ -145,9 +148,9 @@ def __post_init__(self): cache = InMemoryCache() self.api = B2Api(info, cache=cache) self.api.authorize_account(self.realm, self.account_id, self.application_key) - assert BUCKET_NAME_LENGTH - len( - self.this_run_bucket_name_prefix - ) > 5, self.this_run_bucket_name_prefix + assert ( + BUCKET_NAME_LENGTH - len(self.this_run_bucket_name_prefix) > 5 + ), self.this_run_bucket_name_prefix def new_bucket_name(self) -> str: bucket_name = self.this_run_bucket_name_prefix + bucket_name_part( @@ -159,7 +162,7 @@ def new_bucket_name(self) -> str: def new_bucket_info(self) -> dict: return { BUCKET_CREATED_AT_MILLIS: str(current_time_millis()), - "created_by": NODE_DESCRIPTION, + 'created_by': NODE_DESCRIPTION, } def create_bucket(self, bucket_type: str = 'allPublic', **kwargs) -> Bucket: @@ -179,8 +182,14 @@ def _should_remove_bucket(self, bucket: Bucket) -> tuple[bool, str]: delete_older_than = current_time_millis() - BUCKET_CLEANUP_PERIOD_MILLIS this_bucket_creation_time = int(bucket.bucket_info[BUCKET_CREATED_AT_MILLIS]) if this_bucket_creation_time < delete_older_than: - return True, f"this_bucket_creation_time={this_bucket_creation_time} < delete_older_than={delete_older_than}" - return False, f"this_bucket_creation_time={this_bucket_creation_time} >= delete_older_than={delete_older_than}" + return ( + True, + f'this_bucket_creation_time={this_bucket_creation_time} < delete_older_than={delete_older_than}', + ) + return ( + False, + f'this_bucket_creation_time={this_bucket_creation_time} >= delete_older_than={delete_older_than}', + ) else: return True, 'undefined ' + BUCKET_CREATED_AT_MILLIS return False, f'does not start with {self.general_bucket_name_prefix!r}' @@ -230,11 +239,13 @@ def clean_bucket(self, bucket: Bucket | str): if file_version_info.file_retention.mode == RetentionMode.GOVERNANCE: print('Removing retention from file version:', file_version_info.id_) self.api.update_file_retention( - file_version_info.id_, file_version_info.file_name, - NO_RETENTION_FILE_SETTING, True + file_version_info.id_, + file_version_info.file_name, + NO_RETENTION_FILE_SETTING, + True, ) elif file_version_info.file_retention.mode == RetentionMode.COMPLIANCE: - if file_version_info.file_retention.retain_until > current_time_millis(): # yapf: disable + if file_version_info.file_retention.retain_until > current_time_millis(): print( f'File version: {file_version_info.id_} cannot be removed due to compliance mode retention' ) @@ -343,7 +354,6 @@ def should_equal(expected, actual): class CommandLine: - EXPECTED_STDERR_PATTERNS = [ re.compile(r'^Using https?://[\w.]+$'), # account auth re.compile(r'.*B/s]$', re.DOTALL), # progress bar @@ -353,8 +363,7 @@ class CommandLine: r'Set B2_DESTINATION_SSE_C_KEY_ID to allow key identification' ), re.compile( - r'WARNING: Unable to print unicode. Encoding for stdout is: ' - r'\'[a-zA-Z0-9]+\'' + r'WARNING: Unable to print unicode. 
Encoding for stdout is: ' r'\'[a-zA-Z0-9]+\'' ), # windows-bundle tests on CI use cp1252 re.compile(r'Trying to print: .*'), ] @@ -409,16 +418,19 @@ def should_succeed( assert status == 0, f'FAILED with status {status}, stderr={stderr}' if expected_stderr_pattern: - assert expected_stderr_pattern.search(stderr), \ - f'stderr did not match pattern="{expected_stderr_pattern}", stderr="{stderr}"' + assert expected_stderr_pattern.search( + stderr + ), f'stderr did not match pattern="{expected_stderr_pattern}", stderr="{stderr}"' elif stderr != '': for line in (s.strip() for s in stderr.split(os.linesep)): - assert any(p.match(line) for p in self.EXPECTED_STDERR_PATTERNS), \ - f'Unexpected stderr line: {repr(line)}' + assert any( + p.match(line) for p in self.EXPECTED_STDERR_PATTERNS + ), f'Unexpected stderr line: {repr(line)}' if expected_pattern is not None: - assert re.search(expected_pattern, stdout), \ - f'did not match pattern="{expected_pattern}", stdout="{stdout}"' + assert re.search( + expected_pattern, stdout + ), f'did not match pattern="{expected_pattern}", stdout="{stdout}"' return stdout.replace(os.linesep, '\n') @@ -515,16 +527,21 @@ def should_fail(self, args, expected_pattern, additional_env: dict | None = None status, stdout, stderr = self.execute(args, additional_env) assert status != 0, 'ERROR: should have failed' - assert re.search(expected_pattern, stdout + stderr), \ - f'did not match pattern="{expected_pattern}", stdout="{stdout}", stderr="{stderr}"' + assert re.search( + expected_pattern, stdout + stderr + ), f'did not match pattern="{expected_pattern}", stdout="{stdout}", stderr="{stderr}"' def reauthorize(self, check_key_capabilities=False): """Clear and authorize again to the account.""" self.should_succeed(['account', 'clear']) self.should_succeed( [ - 'account', 'authorize', '--environment', self.realm, self.account_id, - self.application_key + 'account', + 'authorize', + '--environment', + self.realm, + self.account_id, + self.application_key, ] ) if check_key_capabilities: @@ -533,9 +550,12 @@ def reauthorize(self, check_key_capabilities=False): 'readBucketNotifications', 'writeBucketNotifications', } - missing_capabilities = set(ALL_CAPABILITIES) - { - 'readBuckets', 'listAllBucketNames' - } - private_preview_caps - set(auth_dict['allowed']['capabilities']) + missing_capabilities = ( + set(ALL_CAPABILITIES) + - {'readBuckets', 'listAllBucketNames'} + - private_preview_caps + - set(auth_dict['allowed']['capabilities']) + ) assert not missing_capabilities, f'it appears that the raw_api integration test is being run with a non-full key. 
Missing capabilities: {missing_capabilities}' def list_file_versions(self, bucket_name, path=''): diff --git a/test/integration/persistent_bucket.py b/test/integration/persistent_bucket.py index b67c45bac..58c7b1c23 100644 --- a/test/integration/persistent_bucket.py +++ b/test/integration/persistent_bucket.py @@ -11,13 +11,14 @@ import os from dataclasses import dataclass from functools import cached_property -from test.integration.helpers import BUCKET_NAME_LENGTH, Api import backoff from b2sdk.v2 import Bucket from b2sdk.v2.exception import DuplicateBucketName, NonExistentBucket -PERSISTENT_BUCKET_NAME_PREFIX = "constst" +from test.integration.helpers import BUCKET_NAME_LENGTH, Api + +PERSISTENT_BUCKET_NAME_PREFIX = 'constst' @dataclass @@ -27,13 +28,13 @@ class PersistentBucketAggregate: @cached_property def virtual_bucket_name(self): - return f"{self.bucket_name}/{self.subfolder}" + return f'{self.bucket_name}/{self.subfolder}' def get_persistent_bucket_name(b2_api: Api) -> str: - bucket_base = os.environ.get("GITHUB_REPOSITORY_ID", b2_api.api.get_account_id()) + bucket_base = os.environ.get('GITHUB_REPOSITORY_ID', b2_api.api.get_account_id()) bucket_hash = hashlib.sha256(bucket_base.encode()).hexdigest() - return f"{PERSISTENT_BUCKET_NAME_PREFIX}-{bucket_hash}" [:BUCKET_NAME_LENGTH] + return f'{PERSISTENT_BUCKET_NAME_PREFIX}-{bucket_hash}'[:BUCKET_NAME_LENGTH] @backoff.on_exception( @@ -49,15 +50,15 @@ def get_or_create_persistent_bucket(b2_api: Api) -> Bucket: except NonExistentBucket: bucket = b2_api.api.create_bucket( bucket_name, - bucket_type="allPublic", + bucket_type='allPublic', lifecycle_rules=[ { - "daysFromHidingToDeleting": 1, - "daysFromUploadingToHiding": 1, - "fileNamePrefix": "", + 'daysFromHidingToDeleting': 1, + 'daysFromUploadingToHiding': 1, + 'fileNamePrefix': '', } ], ) # add the new bucket name to the list of bucket names b2_api.bucket_name_log.append(bucket_name) - return bucket \ No newline at end of file + return bucket diff --git a/test/integration/test_autocomplete.py b/test/integration/test_autocomplete.py index 82420aba8..13e4bb652 100644 --- a/test/integration/test_autocomplete.py +++ b/test/integration/test_autocomplete.py @@ -9,11 +9,12 @@ ###################################################################### import sys -from test.helpers import skip_on_windows import pexpect import pytest +from test.helpers import skip_on_windows + TIMEOUT = 120 # CI can be slow at times when parallelization is extreme BASHRC_CONTENT = """\ @@ -26,19 +27,19 @@ """ -@pytest.fixture(scope="session") +@pytest.fixture(scope='session') def bashrc(homedir): - bashrc_path = (homedir / '.bashrc') + bashrc_path = homedir / '.bashrc' bashrc_path.write_text(BASHRC_CONTENT) yield bashrc_path -@pytest.fixture(scope="module") +@pytest.fixture(scope='module') def cli_command(request) -> str: return request.config.getoption('--sut') -@pytest.fixture(scope="module") +@pytest.fixture(scope='module') def autocomplete_installed(env, homedir, bashrc, cli_version, cli_command, is_running_on_docker): if is_running_on_docker: pytest.skip('Not supported on Docker') @@ -68,7 +69,7 @@ def test_autocomplete_b2_commands(autocomplete_installed, is_running_on_docker, if is_running_on_docker: pytest.skip('Not supported on Docker') shell.send(f'{cli_version} \t\t') - shell.expect_exact(["authorize-account", "download-file", "get-bucket"], timeout=TIMEOUT) + shell.expect_exact(['authorize-account', 'download-file', 'get-bucket'], timeout=TIMEOUT) @skip_on_windows @@ -79,9 +80,9 @@ def 
test_autocomplete_b2_only_matching_commands( pytest.skip('Not supported on Docker') shell.send(f'{cli_version} delete-\t\t') - shell.expect_exact("file", timeout=TIMEOUT) # common part of remaining cmds is autocompleted + shell.expect_exact('file', timeout=TIMEOUT) # common part of remaining cmds is autocompleted with pytest.raises(pexpect.exceptions.TIMEOUT): # no other commands are suggested - shell.expect_exact("get-bucket", timeout=0.5) + shell.expect_exact('get-bucket', timeout=0.5) @skip_on_windows @@ -98,7 +99,7 @@ def test_autocomplete_b2__download_file__b2uri( if is_running_on_docker: pytest.skip('Not supported on Docker') shell.send(f'{cli_version} file download \t\t') - shell.expect_exact("b2://", timeout=TIMEOUT) + shell.expect_exact('b2://', timeout=TIMEOUT) shell.send('b2://\t\t') shell.expect_exact(bucket_name, timeout=TIMEOUT) shell.send(f'{bucket_name}/\t\t') diff --git a/test/integration/test_b2_command_line.py b/test/integration/test_b2_command_line.py index a2b6c1edd..4503019fe 100755 --- a/test/integration/test_b2_command_line.py +++ b/test/integration/test_b2_command_line.py @@ -114,7 +114,7 @@ def test_authorize_account_via_env_vars_saving_credentials( B2_ENVIRONMENT_ENV_VAR: realm, B2_APPLICATION_KEY_ID_ENV_VAR: application_key_id, B2_APPLICATION_KEY_ENV_VAR: application_key, - } + }, ) assert account_info_file.exists() @@ -146,7 +146,7 @@ def test_clear_account_with_env_vars( B2_ENVIRONMENT_ENV_VAR: realm, B2_APPLICATION_KEY_ID_ENV_VAR: application_key_id, B2_APPLICATION_KEY_ENV_VAR: application_key, - } + }, ) assert account_info_file.exists() @@ -183,7 +183,7 @@ def test_command_with_env_vars_saving_credentials( B2_ENVIRONMENT_ENV_VAR: realm, B2_APPLICATION_KEY_ID_ENV_VAR: application_key_id, B2_APPLICATION_KEY_ENV_VAR: application_key, - } + }, ) assert account_info_file.exists() @@ -218,7 +218,7 @@ def test_command_with_env_vars_not_saving_credentials( B2_ENVIRONMENT_ENV_VAR: realm, B2_APPLICATION_KEY_ID_ENV_VAR: application_key_id, B2_APPLICATION_KEY_ENV_VAR: application_key, - } + }, ) assert account_info_file.exists() @@ -262,7 +262,7 @@ def test_command_with_env_vars_reusing_existing_account_info( B2_ENVIRONMENT_ENV_VAR: realm, B2_APPLICATION_KEY_ID_ENV_VAR: application_key_id, B2_APPLICATION_KEY_ENV_VAR: application_key, - } + }, ) assert account_info_file.exists() @@ -274,8 +274,12 @@ def test_command_with_env_vars_reusing_existing_account_info( def uploaded_sample_file(b2_tool, persistent_bucket, sample_filepath): return b2_tool.should_succeed_json( [ - 'file', 'upload', '--quiet', persistent_bucket.bucket_name, - str(sample_filepath), f'{persistent_bucket.subfolder}/sample_file' + 'file', + 'upload', + '--quiet', + persistent_bucket.bucket_name, + str(sample_filepath), + f'{persistent_bucket.subfolder}/sample_file', ] ) @@ -284,24 +288,25 @@ def test_download(b2_tool, persistent_bucket, sample_filepath, uploaded_sample_f output_a = tmp_path / 'a' b2_tool.should_succeed( [ - 'file', 'download', '--quiet', + 'file', + 'download', + '--quiet', f"b2://{persistent_bucket.bucket_name}/{uploaded_sample_file['fileName']}", - str(output_a) + str(output_a), ] ) assert output_a.read_text() == sample_filepath.read_text() output_b = tmp_path / 'b' b2_tool.should_succeed( - ['file', 'download', '--quiet', f"b2id://{uploaded_sample_file['fileId']}", - str(output_b)] + ['file', 'download', '--quiet', f"b2id://{uploaded_sample_file['fileId']}", str(output_b)] ) assert output_b.read_text() == sample_filepath.read_text() def test_basic(b2_tool, persistent_bucket, 
sample_file, tmp_path, b2_uri_args, apiver_int): bucket_name = persistent_bucket.bucket_name - subfolder = f"{persistent_bucket.subfolder}/" + subfolder = f'{persistent_bucket.subfolder}/' file_mod_time_str = str(file_mod_time_millis(sample_file)) file_data = read_file(sample_file) @@ -325,20 +330,47 @@ def test_basic(b2_tool, persistent_bucket, sample_file, tmp_path, b2_uri_args, a ) b2_tool.should_succeed( [ - 'file', 'upload', '--no-progress', '--sha1', hex_sha1, '--info', 'foo=bar=baz', - '--info', 'color=blue', bucket_name, sample_file, f'{subfolder}c' + 'file', + 'upload', + '--no-progress', + '--sha1', + hex_sha1, + '--info', + 'foo=bar=baz', + '--info', + 'color=blue', + bucket_name, + sample_file, + f'{subfolder}c', ] ) b2_tool.should_fail( [ - 'file', 'upload', '--no-progress', '--sha1', hex_sha1, '--info', 'foo-bar', '--info', - 'color=blue', bucket_name, sample_file, f'{subfolder}c' - ], r'ERROR: Bad file info: foo-bar' + 'file', + 'upload', + '--no-progress', + '--sha1', + hex_sha1, + '--info', + 'foo-bar', + '--info', + 'color=blue', + bucket_name, + sample_file, + f'{subfolder}c', + ], + r'ERROR: Bad file info: foo-bar', ) b2_tool.should_succeed( [ - 'file', 'upload', '--no-progress', '--content-type', 'text/plain', bucket_name, - sample_file, f'{subfolder}d' + 'file', + 'upload', + '--no-progress', + '--content-type', + 'text/plain', + bucket_name, + sample_file, + f'{subfolder}d', ] ) @@ -354,8 +386,11 @@ def test_basic(b2_tool, persistent_bucket, sample_file, tmp_path, b2_uri_args, a ) list_of_files = b2_tool.should_succeed_json( [ - 'ls', '--json', '--recursive', '--with-wildcard', - *b2_uri_args(bucket_name, f'{subfolder}rm*') + 'ls', + '--json', + '--recursive', + '--with-wildcard', + *b2_uri_args(bucket_name, f'{subfolder}rm*'), ] ) should_equal([f'{subfolder}rm1'], [f['fileName'] for f in list_of_files]) @@ -378,7 +413,8 @@ def test_basic(b2_tool, persistent_bucket, sample_file, tmp_path, b2_uri_args, a f'{subfolder}b/1', f'{subfolder}b/2', f'{subfolder}d', - ], [f['fileName'] for f in list_of_files] + ], + [f['fileName'] for f in list_of_files], ) b2_tool.should_succeed(['file', 'unhide', f'b2://{persistent_bucket.virtual_bucket_name}/c']) @@ -393,7 +429,8 @@ def test_basic(b2_tool, persistent_bucket, sample_file, tmp_path, b2_uri_args, a f'{subfolder}b/2', f'{subfolder}c', f'{subfolder}d', - ], [f['fileName'] for f in list_of_files] + ], + [f['fileName'] for f in list_of_files], ) b2_tool.should_succeed(['file', 'hide', f'b2://{bucket_name}/{subfolder}c']) @@ -407,7 +444,8 @@ def test_basic(b2_tool, persistent_bucket, sample_file, tmp_path, b2_uri_args, a f'{subfolder}b/1', f'{subfolder}b/2', f'{subfolder}d', - ], [f['fileName'] for f in list_of_files] + ], + [f['fileName'] for f in list_of_files], ) list_of_files = b2_tool.should_succeed_json( @@ -422,11 +460,12 @@ def test_basic(b2_tool, persistent_bucket, sample_file, tmp_path, b2_uri_args, a f'{subfolder}c', f'{subfolder}c', f'{subfolder}d', - ], [f['fileName'] for f in list_of_files] + ], + [f['fileName'] for f in list_of_files], ) should_equal( ['upload', 'upload', 'upload', 'upload', 'hide', 'upload', 'upload'], - [f['action'] for f in list_of_files] + [f['action'] for f in list_of_files], ) first_a_version = list_of_files[0] @@ -437,23 +476,28 @@ def test_basic(b2_tool, persistent_bucket, sample_file, tmp_path, b2_uri_args, a ['ls', '--json', '--recursive', '--versions', *b2_uri_args(bucket_name, f'{subfolder}c')] ) if apiver_int >= 4: # b2://bucketName/c should list all c versions on v4 - should_equal([ - 
f'{subfolder}c', - f'{subfolder}c', - ], [f['fileName'] for f in list_of_files]) + should_equal( + [ + f'{subfolder}c', + f'{subfolder}c', + ], + [f['fileName'] for f in list_of_files], + ) else: should_equal([], [f['fileName'] for f in list_of_files]) b2_tool.should_succeed( [ - 'file', 'server-side-copy', f'b2id://{first_a_version["fileId"]}', - f'b2://{bucket_name}/{subfolder}x' + 'file', + 'server-side-copy', + f'b2id://{first_a_version["fileId"]}', + f'b2://{bucket_name}/{subfolder}x', ] ) b2_tool.should_succeed( ['ls', *b2_uri_args(bucket_name, f'{subfolder}')], - '^{0}a{1}{0}b/{1}{0}d{1}'.format(subfolder, os.linesep) + f'^{subfolder}a{os.linesep}{subfolder}b/{os.linesep}{subfolder}d{os.linesep}', ) # file_id, action, date, time, size(, replication), name @@ -461,25 +505,26 @@ def test_basic(b2_tool, persistent_bucket, sample_file, tmp_path, b2_uri_args, a ['ls', '--long', *b2_uri_args(bucket_name, f'{subfolder}')], '^4_z.* upload .* {1} {2}a{0}.* - .* {2}b/{0}4_z.* upload .* {1} {2}d{0}'.format( os.linesep, len(file_data), subfolder - ) + ), ) b2_tool.should_succeed( ['ls', '--long', '--replication', *b2_uri_args(bucket_name, f'{subfolder}')], - '^4_z.* upload .* {1} - {2}a{0}.* - .* - {2}b/{0}4_z.* upload .* {1} - {2}d{0}'. - format(os.linesep, len(file_data), subfolder) + '^4_z.* upload .* {1} - {2}a{0}.* - .* - {2}b/{0}4_z.* upload .* {1} - {2}d{0}'.format( + os.linesep, len(file_data), subfolder + ), ) b2_tool.should_succeed( ['ls', '--versions', *b2_uri_args(bucket_name, f'{subfolder}')], - f'^{subfolder}a{os.linesep}{subfolder}a{os.linesep}{subfolder}b/{os.linesep}{subfolder}c{os.linesep}{subfolder}c{os.linesep}{subfolder}d{os.linesep}' + f'^{subfolder}a{os.linesep}{subfolder}a{os.linesep}{subfolder}b/{os.linesep}{subfolder}c{os.linesep}{subfolder}c{os.linesep}{subfolder}d{os.linesep}', ) b2_tool.should_succeed( ['ls', *b2_uri_args(bucket_name, f'{subfolder}b')], - f'^{subfolder}b/1{os.linesep}{subfolder}b/2{os.linesep}' + f'^{subfolder}b/1{os.linesep}{subfolder}b/2{os.linesep}', ) b2_tool.should_succeed( ['ls', *b2_uri_args(bucket_name, f'{subfolder}b/')], - f'^{subfolder}b/1{os.linesep}{subfolder}b/2{os.linesep}' + f'^{subfolder}b/1{os.linesep}{subfolder}b/2{os.linesep}', ) file_info = b2_tool.should_succeed_json( @@ -488,7 +533,7 @@ def test_basic(b2_tool, persistent_bucket, sample_file, tmp_path, b2_uri_args, a expected_info = { 'color': 'blue', 'foo': 'bar=baz', - 'src_last_modified_millis': file_mod_time_str + 'src_last_modified_millis': file_mod_time_str, } should_equal(expected_info, file_info['fileInfo']) @@ -496,17 +541,17 @@ def test_basic(b2_tool, persistent_bucket, sample_file, tmp_path, b2_uri_args, a ['delete-file-version', f'{subfolder}c', first_c_version['fileId']], expected_stderr_pattern=re.compile( re.escape('WARNING: `delete-file-version` command is deprecated. 
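A note on the os.linesep f-strings in the ls assertions above: building the expected output with os.linesep keeps one pattern valid on both POSIX (\n) and Windows (\r\n), and the leading '^' pins the first entry to the start of the captured output. A tiny self-contained check of the same trick:

    import os
    import re

    # expected ls output, built the same way as in the assertions above
    stdout = f'a{os.linesep}b/{os.linesep}d{os.linesep}'
    assert re.search(f'^a{os.linesep}b/{os.linesep}d{os.linesep}', stdout)
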
Use `rm` instead.') - ) + ), ) b2_tool.should_succeed( - ['ls', *b2_uri_args(bucket_name, f"{subfolder}")], - f'^{subfolder}a{os.linesep}{subfolder}b/{os.linesep}{subfolder}c{os.linesep}{subfolder}d{os.linesep}' + ['ls', *b2_uri_args(bucket_name, f'{subfolder}')], + f'^{subfolder}a{os.linesep}{subfolder}b/{os.linesep}{subfolder}c{os.linesep}{subfolder}d{os.linesep}', ) b2_tool.should_succeed(['file', 'url', f"b2id://{second_c_version['fileId']}"]) b2_tool.should_succeed( - ['file', 'url', f"b2://{persistent_bucket.virtual_bucket_name}/any-file-name"], + ['file', 'url', f'b2://{persistent_bucket.virtual_bucket_name}/any-file-name'], '^https://.*/file/{}/{}\r?$'.format( persistent_bucket.virtual_bucket_name, 'any-file-name', @@ -545,18 +590,22 @@ def test_debug_logs(b2_tool, is_running_on_docker, tmp_path): *b2_tool.get_bucket_info_args(), ], ) - b2_tool.should_succeed(['bucket', 'delete', to_be_removed_bucket_name],) + b2_tool.should_succeed( + ['bucket', 'delete', to_be_removed_bucket_name], + ) b2_tool.should_fail( ['bucket', 'delete', to_be_removed_bucket_name], - re.compile(r'^ERROR: Bucket with id=\w* not found[^$]*$') + re.compile(r'^ERROR: Bucket with id=\w* not found[^$]*$'), ) # Check logging settings if not is_running_on_docker: # It's difficult to read the log in docker in CI b2_tool.should_fail( ['bucket', 'delete', to_be_removed_bucket_name, '--debug-logs'], - re.compile(r'^ERROR: Bucket with id=\w* not found[^$]*$') + re.compile(r'^ERROR: Bucket with id=\w* not found[^$]*$'), + ) + stack_trace_in_log = ( + r'Traceback \(most recent call last\):.*Bucket with id=\w* not found[^$]*' ) - stack_trace_in_log = r'Traceback \(most recent call last\):.*Bucket with id=\w* not found[^$]*' # the two regexes below depend on log message from urllib3, which is not perfect, but this test needs to # check global logging settings @@ -582,7 +631,7 @@ def test_debug_logs(b2_tool, is_running_on_docker, tmp_path): b2_tool.should_fail( ['bucket', 'delete', to_be_removed_bucket_name, '--verbose', '--debug-logs'], - stderr_regex + stderr_regex, ) with open('b2_cli.log') as logfile: log = logfile.read() @@ -597,8 +646,13 @@ def test_bucket(b2_tool, persistent_bucket): }""" output = b2_tool.should_succeed_json( [ - 'bucket', 'update', '--lifecycle-rule', rule, persistent_bucket.bucket_name, - 'allPublic', *b2_tool.get_bucket_info_args() + 'bucket', + 'update', + '--lifecycle-rule', + rule, + persistent_bucket.bucket_name, + 'allPublic', + *b2_tool.get_bucket_info_args(), ], ) @@ -609,12 +663,8 @@ def test_bucket(b2_tool, persistent_bucket): break ########## \\ doesn't happen on production, but messes up some tests // ########## - assert output["lifecycleRules"] == [ - { - "daysFromHidingToDeleting": 1, - "daysFromUploadingToHiding": None, - "fileNamePrefix": "" - } + assert output['lifecycleRules'] == [ + {'daysFromHidingToDeleting': 1, 'daysFromUploadingToHiding': None, 'fileNamePrefix': ''} ] @@ -637,9 +687,13 @@ def test_key_restrictions(b2_tool, bucket_name, sample_file, bucket_factory, b2_ ['account', 'authorize', '--environment', b2_tool.realm, key_one_id, key_one], ) - b2_tool.should_succeed(['bucket', 'get', bucket_name],) + b2_tool.should_succeed( + ['bucket', 'get', bucket_name], + ) second_bucket_name = bucket_factory().name - b2_tool.should_succeed(['bucket', 'get', second_bucket_name],) + b2_tool.should_succeed( + ['bucket', 'get', second_bucket_name], + ) key_two_name = 'clt-testKey-02' + random_hex(6) created_key_two_stdout = b2_tool.should_succeed( @@ -673,8 +727,12 @@ def 
test_key_restrictions(b2_tool, bucket_name, sample_file, bucket_factory, b2_ b2_tool.should_succeed( ['account', 'authorize', '--environment', b2_tool.realm, key_two_id, key_two], ) - b2_tool.should_succeed(['bucket', 'get', bucket_name],) - b2_tool.should_succeed(['ls', *b2_uri_args(bucket_name)],) + b2_tool.should_succeed( + ['bucket', 'get', bucket_name], + ) + b2_tool.should_succeed( + ['ls', *b2_uri_args(bucket_name)], + ) b2_tool.should_succeed( ['account', 'authorize', '--environment', b2_tool.realm, key_three_id, key_three], @@ -682,10 +740,12 @@ def test_key_restrictions(b2_tool, bucket_name, sample_file, bucket_factory, b2_ # Capabilities can be listed in any order. While this regex doesn't confirm that all three are present, # in ensures that there are three in total. - failed_bucket_err = r'Deletion of file "test" \([^\)]+\) failed: unauthorized for ' \ - r'application key with capabilities ' \ - r"'(.*listFiles.*|.*listBuckets.*|.*readFiles.*){3}', " \ - r"restricted to bucket '%s' \(unauthorized\)" % bucket_name + failed_bucket_err = ( + r'Deletion of file "test" \([^\)]+\) failed: unauthorized for ' + r'application key with capabilities ' + r"'(.*listFiles.*|.*listBuckets.*|.*readFiles.*){3}', " + r"restricted to bucket '%s' \(unauthorized\)" % bucket_name + ) b2_tool.should_fail( ['rm', '--recursive', '--no-progress', *b2_uri_args(bucket_name)], failed_bucket_err ) @@ -702,8 +762,12 @@ def test_key_restrictions(b2_tool, bucket_name, sample_file, bucket_factory, b2_ # reauthorize with more capabilities for clean up b2_tool.should_succeed( [ - 'account', 'authorize', '--environment', b2_tool.realm, b2_tool.account_id, - b2_tool.application_key + 'account', + 'authorize', + '--environment', + b2_tool.realm, + b2_tool.account_id, + b2_tool.application_key, ] ) b2_tool.should_succeed(['key', 'delete', key_one_id]) @@ -721,8 +785,7 @@ def test_key_restrictions(b2_tool, bucket_name, sample_file, bucket_factory, b2_ def test_delete_bucket(b2_tool, bucket_name): b2_tool.should_succeed(['bucket', 'delete', bucket_name]) b2_tool.should_fail( - ['bucket', 'delete', bucket_name], - re.compile(r'^ERROR: Bucket with id=\w* not found[^$]*$') + ['bucket', 'delete', bucket_name], re.compile(r'^ERROR: Bucket with id=\w* not found[^$]*$') ) @@ -736,7 +799,6 @@ def test_rapid_bucket_operations(b2_tool): def test_account(b2_tool, cli_version, apiver_int, monkeypatch): - with monkeypatch.context() as mp: account_info_file_path = os.path.join(mkdtemp(), 'b2_account_info') mp.setenv(B2_ACCOUNT_INFO_ENV_VAR, account_info_file_path) @@ -768,8 +830,8 @@ def test_account(b2_tool, cli_version, apiver_int, monkeypatch): b2_tool.should_fail( ['bucket', 'create', bucket_name, 'allPrivate'], r'ERROR: Missing account data: \'NoneType\' object is not subscriptable (\(key 0\) )? ' - fr'Use: \'{cli_version}(\.(exe|EXE))? account authorize\' or provide auth data with \'B2_APPLICATION_KEY_ID\' and ' - r'\'B2_APPLICATION_KEY\' environment variables' + rf'Use: \'{cli_version}(\.(exe|EXE))? 
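The failed_bucket_err pattern assembled above relies on a quantified alternation: `(...|...|...){3}` makes the regex engine carve the quoted capability list into three consecutive chunks, each containing some capability word. As the test's own comment notes, that checks the count of capability words, not that all three distinct capabilities appear. A worked example:

    import re

    pattern = r"'(.*listFiles.*|.*listBuckets.*|.*readFiles.*){3}'"
    # the engine splits the quoted list into three consecutive chunks, one per repetition
    assert re.search(pattern, "'listBuckets,listFiles,readFiles'")  # any order passes
    assert re.search(pattern, "'listFiles,listFiles,listFiles'")    # so do duplicates
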
account authorize\' or provide auth data with \'B2_APPLICATION_KEY_ID\' and ' + r'\'B2_APPLICATION_KEY\' environment variables', ) with monkeypatch.context() as mp: @@ -790,7 +852,7 @@ def test_account(b2_tool, cli_version, apiver_int, monkeypatch): if apiver_int >= 4: assert not os.path.exists( account_info_file_path - ), 'sqlite file was created while it shouldn\'t' + ), "sqlite file was created while it shouldn't" else: assert os.path.exists(account_info_file_path), 'sqlite file was not created' account_info = SqliteAccountInfo(account_info_file_path) @@ -804,14 +866,14 @@ def test_account(b2_tool, cli_version, apiver_int, monkeypatch): os.environ['B2_APPLICATION_KEY'] = os.environ['B2_TEST_APPLICATION_KEY'] b2_tool.should_fail( ['bucket', 'create', bucket_name, 'allPrivate'], - r'Please provide both "B2_APPLICATION_KEY" and "B2_APPLICATION_KEY_ID" environment variables or none of them' + r'Please provide both "B2_APPLICATION_KEY" and "B2_APPLICATION_KEY_ID" environment variables or none of them', ) os.environ.pop('B2_APPLICATION_KEY') os.environ['B2_APPLICATION_KEY_ID'] = os.environ['B2_TEST_APPLICATION_KEY_ID'] b2_tool.should_fail( ['bucket', 'create', bucket_name, 'allPrivate'], - r'Please provide both "B2_APPLICATION_KEY" and "B2_APPLICATION_KEY_ID" environment variables or none of them' + r'Please provide both "B2_APPLICATION_KEY" and "B2_APPLICATION_KEY_ID" environment variables or none of them', ) os.environ.pop('B2_APPLICATION_KEY_ID') @@ -865,7 +927,9 @@ def encryption_summary(sse_dict, file_info): sse_dict = sse_dict.as_dict() encryption = sse_dict['mode'] assert encryption in ( - EncryptionMode.NONE.value, EncryptionMode.SSE_B2.value, EncryptionMode.SSE_C.value + EncryptionMode.NONE.value, + EncryptionMode.SSE_B2.value, + EncryptionMode.SSE_C.value, ) algorithm = sse_dict.get('algorithm') if algorithm is not None: @@ -878,7 +942,7 @@ def encryption_summary(sse_dict, file_info): @pytest.mark.parametrize( - "dir_, encryption", + 'dir_, encryption', [('sync', None), ('sync', SSE_B2_AES), ('sync', SSE_C_AES), ('', None)], ) def test_sync_up(tmp_path, b2_tool, persistent_bucket, apiver_int, dir_, encryption): @@ -936,15 +1000,23 @@ def test_sync_up(tmp_path, b2_tool, persistent_bucket, apiver_int, dir_, encrypt expected_encryption_str = encryption_summary(expected_encryption.as_dict(), {}) elif encryption == SSE_B2_AES: command = [ - 'sync', '--no-progress', '--destination-server-side-encryption', 'SSE-B2', tmp_path, - b2_sync_point + 'sync', + '--no-progress', + '--destination-server-side-encryption', + 'SSE-B2', + tmp_path, + b2_sync_point, ] expected_encryption = encryption expected_encryption_str = encryption_summary(expected_encryption.as_dict(), {}) elif encryption == SSE_C_AES: command = [ - 'sync', '--no-progress', '--destination-server-side-encryption', 'SSE-C', tmp_path, - b2_sync_point + 'sync', + '--no-progress', + '--destination-server-side-encryption', + 'SSE-C', + tmp_path, + b2_sync_point, ] expected_encryption = encryption additional_env = { @@ -975,14 +1047,14 @@ def test_sync_up(tmp_path, b2_tool, persistent_bucket, apiver_int, dir_, encrypt if encryption and encryption.mode == EncryptionMode.SSE_C: b2_tool.should_fail( command, - expected_pattern="ValueError: Using SSE-C requires providing an encryption key via " - "B2_DESTINATION_SSE_C_KEY_B64 env var" + expected_pattern='ValueError: Using SSE-C requires providing an encryption key via ' + 'B2_DESTINATION_SSE_C_KEY_B64 env var', ) if encryption is not None: return # that's enough, we've checked that encryption 
works, no need to repeat the whole sync suite c_id = find_file_id(file_versions, prefix + 'c') - file_info = b2_tool.should_succeed_json(['file', 'info', f"b2id://{c_id}"])['fileInfo'] + file_info = b2_tool.should_succeed_json(['file', 'info', f'b2id://{c_id}'])['fileInfo'] should_equal(file_mod_time_millis(tmp_path / 'c'), int(file_info['src_last_modified_millis'])) os.unlink(tmp_path / 'b') @@ -1003,7 +1075,8 @@ def test_sync_up(tmp_path, b2_tool, persistent_bucket, apiver_int, dir_, encrypt '+ ' + prefix + 'b', '+ ' + prefix + 'c', '+ ' + prefix + 'c', - ], file_version_summary(file_versions) + ], + file_version_summary(file_versions), ) os.unlink(tmp_path / 'a') @@ -1013,9 +1086,12 @@ def test_sync_up(tmp_path, b2_tool, persistent_bucket, apiver_int, dir_, encrypt file_versions = b2_tool.list_file_versions( persistent_bucket.bucket_name, persistent_bucket.subfolder ) - should_equal([ - '+ ' + prefix + 'c', - ], file_version_summary(file_versions)) + should_equal( + [ + '+ ' + prefix + 'c', + ], + file_version_summary(file_versions), + ) # test --compare-threshold with file size write_file(tmp_path / 'c', b'hello world!') @@ -1023,31 +1099,51 @@ def test_sync_up(tmp_path, b2_tool, persistent_bucket, apiver_int, dir_, encrypt # should not upload new version of c b2_tool.should_succeed( [ - 'sync', '--no-progress', '--keep-days', '10', '--compare-versions', 'size', - '--compare-threshold', '1', tmp_path, b2_sync_point + 'sync', + '--no-progress', + '--keep-days', + '10', + '--compare-versions', + 'size', + '--compare-threshold', + '1', + tmp_path, + b2_sync_point, ] ) file_versions = b2_tool.list_file_versions( persistent_bucket.bucket_name, persistent_bucket.subfolder ) - should_equal([ - '+ ' + prefix + 'c', - ], file_version_summary(file_versions)) + should_equal( + [ + '+ ' + prefix + 'c', + ], + file_version_summary(file_versions), + ) # should upload new version of c b2_tool.should_succeed( [ - 'sync', '--no-progress', '--keep-days', '10', '--compare-versions', 'size', tmp_path, - b2_sync_point + 'sync', + '--no-progress', + '--keep-days', + '10', + '--compare-versions', + 'size', + tmp_path, + b2_sync_point, ] ) file_versions = b2_tool.list_file_versions( persistent_bucket.bucket_name, persistent_bucket.subfolder ) - should_equal([ - '+ ' + prefix + 'c', - '+ ' + prefix + 'c', - ], file_version_summary(file_versions)) + should_equal( + [ + '+ ' + prefix + 'c', + '+ ' + prefix + 'c', + ], + file_version_summary(file_versions), + ) set_file_mod_time_millis(tmp_path / 'c', file_mod_time_millis(tmp_path / 'c') + 2000) @@ -1055,23 +1151,40 @@ def test_sync_up(tmp_path, b2_tool, persistent_bucket, apiver_int, dir_, encrypt # should not upload new version of c b2_tool.should_succeed( [ - 'sync', '--no-progress', '--keep-days', '10', '--compare-versions', 'modTime', - '--compare-threshold', '2000', tmp_path, b2_sync_point + 'sync', + '--no-progress', + '--keep-days', + '10', + '--compare-versions', + 'modTime', + '--compare-threshold', + '2000', + tmp_path, + b2_sync_point, ] ) file_versions = b2_tool.list_file_versions( persistent_bucket.bucket_name, persistent_bucket.subfolder ) - should_equal([ - '+ ' + prefix + 'c', - '+ ' + prefix + 'c', - ], file_version_summary(file_versions)) + should_equal( + [ + '+ ' + prefix + 'c', + '+ ' + prefix + 'c', + ], + file_version_summary(file_versions), + ) # should upload new version of c b2_tool.should_succeed( [ - 'sync', '--no-progress', '--keep-days', '10', '--compare-versions', 'modTime', tmp_path, - b2_sync_point + 'sync', + '--no-progress', + 
'--keep-days', + '10', + '--compare-versions', + 'modTime', + tmp_path, + b2_sync_point, ] ) file_versions = b2_tool.list_file_versions( @@ -1082,7 +1195,8 @@ def test_sync_up(tmp_path, b2_tool, persistent_bucket, apiver_int, dir_, encrypt '+ ' + prefix + 'c', '+ ' + prefix + 'c', '+ ' + prefix + 'c', - ], file_version_summary(file_versions) + ], + file_version_summary(file_versions), ) # create one more file @@ -1154,7 +1268,6 @@ def test_sync_down_sse_c_no_prefix(b2_tool, bucket_name, sample_file): def sync_down_helper(b2_tool, bucket_name, folder_in_bucket, sample_file, encryption=None): - b2_sync_point = f'b2:{bucket_name}' if folder_in_bucket: b2_sync_point += '/' + folder_in_bucket @@ -1188,13 +1301,13 @@ def sync_down_helper(b2_tool, bucket_name, folder_in_bucket, sample_file, encryp # Put a couple files in B2 b2_tool.should_succeed( - ['file', 'upload', '--no-progress', bucket_name, sample_file, b2_file_prefix + 'a'] + - upload_encryption_args, + ['file', 'upload', '--no-progress', bucket_name, sample_file, b2_file_prefix + 'a'] + + upload_encryption_args, additional_env=upload_additional_env, ) b2_tool.should_succeed( - ['file', 'upload', '--no-progress', bucket_name, sample_file, b2_file_prefix + 'b'] + - upload_encryption_args, + ['file', 'upload', '--no-progress', bucket_name, sample_file, b2_file_prefix + 'b'] + + upload_encryption_args, additional_env=upload_additional_env, ) @@ -1207,8 +1320,8 @@ def sync_down_helper(b2_tool, bucket_name, folder_in_bucket, sample_file, encryp # Put another file in B2 b2_tool.should_succeed( - ['file', 'upload', '--no-progress', bucket_name, sample_file, b2_file_prefix + 'c'] + - upload_encryption_args, + ['file', 'upload', '--no-progress', bucket_name, sample_file, b2_file_prefix + 'c'] + + upload_encryption_args, additional_env=upload_additional_env, ) @@ -1216,9 +1329,14 @@ def sync_down_helper(b2_tool, bucket_name, folder_in_bucket, sample_file, encryp mod_time = str((file_mod_time_millis(sample_file) - 10) / 1000) b2_tool.should_succeed( [ - 'sync', '--no-progress', '--exclude-if-modified-after', mod_time, b2_sync_point, - local_path - ] + sync_encryption_args, + 'sync', + '--no-progress', + '--exclude-if-modified-after', + mod_time, + b2_sync_point, + local_path, + ] + + sync_encryption_args, additional_env=sync_additional_env, ) should_equal(['a', 'b'], sorted(os.listdir(local_path))) @@ -1226,18 +1344,30 @@ def sync_down_helper(b2_tool, bucket_name, folder_in_bucket, sample_file, encryp # Put another file in B2 with a custom upload timestamp b2_tool.should_succeed( [ - 'file', 'upload', '--no-progress', '--custom-upload-timestamp', '1367900664152', - bucket_name, sample_file, b2_file_prefix + 'd' - ] + upload_encryption_args, + 'file', + 'upload', + '--no-progress', + '--custom-upload-timestamp', + '1367900664152', + bucket_name, + sample_file, + b2_file_prefix + 'd', + ] + + upload_encryption_args, additional_env=upload_additional_env, ) # Sync the files with one file being excluded because of upload timestamp b2_tool.should_succeed( [ - 'sync', '--no-progress', '--exclude-if-uploaded-after', '1367900664142', - b2_sync_point, local_path - ] + sync_encryption_args, + 'sync', + '--no-progress', + '--exclude-if-uploaded-after', + '1367900664142', + b2_sync_point, + local_path, + ] + + sync_encryption_args, additional_env=sync_additional_env, ) should_equal(['a', 'b'], sorted(os.listdir(local_path))) @@ -1258,8 +1388,7 @@ def sync_down_helper(b2_tool, bucket_name, folder_in_bucket, sample_file, encryp ) b2_tool.should_fail( ['sync', 
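These --compare-threshold assertions encode a simple rule: a size or mtime difference no larger than the threshold counts as "unchanged", so sync skips the upload. An illustrative sketch of that rule (my own simplification, not the b2sdk implementation):

    def needs_upload(local_val, remote_val, threshold=0):
        # applies to whichever attribute --compare-versions selects (size or modTime)
        return abs(local_val - remote_val) > threshold

    assert not needs_upload(3000, 1000, threshold=2000)  # +2000 ms drift, threshold 2000: skipped
    assert needs_upload(3000, 1000)                      # default threshold 0: re-uploaded
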
'--no-progress', b2_sync_point, new_local_path], - expected_pattern= - 'b2sdk._internal.exception.BadRequest: The object was stored using a form of Server Side ' + expected_pattern='b2sdk._internal.exception.BadRequest: The object was stored using a form of Server Side ' 'Encryption. The correct parameters must be provided to retrieve the object. ' r'\(bad_request\)', ) @@ -1279,7 +1408,7 @@ def test_sync_copy_no_prefix_default_encryption(bucket_factory, b2_tool, bucket_ '', sample_file=sample_file, destination_encryption=None, - expected_encryption=SSE_NONE + expected_encryption=SSE_NONE, ) @@ -1291,7 +1420,7 @@ def test_sync_copy_no_prefix_no_encryption(bucket_factory, b2_tool, bucket_name, '', sample_file=sample_file, destination_encryption=SSE_NONE, - expected_encryption=SSE_NONE + expected_encryption=SSE_NONE, ) @@ -1418,12 +1547,19 @@ def run_sync_copy_with_basic_checks( ): # Put a couple files in B2 if source_encryption is None or source_encryption.mode in ( - EncryptionMode.NONE, EncryptionMode.SSE_B2 + EncryptionMode.NONE, + EncryptionMode.SSE_B2, ): b2_tool.should_succeed( [ - 'file', 'upload', '--no-progress', '--destination-server-side-encryption', 'SSE-B2', - bucket_name, sample_file, b2_file_prefix + 'a' + 'file', + 'upload', + '--no-progress', + '--destination-server-side-encryption', + 'SSE-B2', + bucket_name, + sample_file, + b2_file_prefix + 'a', ] ) b2_tool.should_succeed( @@ -1433,14 +1569,20 @@ def run_sync_copy_with_basic_checks( for suffix in ['a', 'b']: b2_tool.should_succeed( [ - 'file', 'upload', '--no-progress', '--destination-server-side-encryption', - 'SSE-C', bucket_name, sample_file, b2_file_prefix + suffix + 'file', + 'upload', + '--no-progress', + '--destination-server-side-encryption', + 'SSE-C', + bucket_name, + sample_file, + b2_file_prefix + suffix, ], additional_env={ - 'B2_DESTINATION_SSE_C_KEY_B64': - base64.b64encode(source_encryption.key.secret).decode(), - 'B2_DESTINATION_SSE_C_KEY_ID': - source_encryption.key.key_id, + 'B2_DESTINATION_SSE_C_KEY_B64': base64.b64encode( + source_encryption.key.secret + ).decode(), + 'B2_DESTINATION_SSE_C_KEY_ID': source_encryption.key.key_id, }, ) else: @@ -1452,43 +1594,53 @@ def run_sync_copy_with_basic_checks( elif destination_encryption == SSE_B2_AES: b2_tool.should_succeed( [ - 'sync', '--no-progress', '--destination-server-side-encryption', - destination_encryption.mode.value, b2_sync_point, other_b2_sync_point + 'sync', + '--no-progress', + '--destination-server-side-encryption', + destination_encryption.mode.value, + b2_sync_point, + other_b2_sync_point, ] ) elif destination_encryption.mode == EncryptionMode.SSE_C: b2_tool.should_fail( [ - 'sync', '--no-progress', '--destination-server-side-encryption', - destination_encryption.mode.value, b2_sync_point, other_b2_sync_point + 'sync', + '--no-progress', + '--destination-server-side-encryption', + destination_encryption.mode.value, + b2_sync_point, + other_b2_sync_point, ], additional_env={ - 'B2_DESTINATION_SSE_C_KEY_B64': - base64.b64encode(destination_encryption.key.secret).decode(), - 'B2_DESTINATION_SSE_C_KEY_ID': - destination_encryption.key.key_id, + 'B2_DESTINATION_SSE_C_KEY_B64': base64.b64encode( + destination_encryption.key.secret + ).decode(), + 'B2_DESTINATION_SSE_C_KEY_ID': destination_encryption.key.key_id, }, - expected_pattern= - 'b2sdk._internal.exception.BadRequest: The object was stored using a form of Server Side ' + expected_pattern='b2sdk._internal.exception.BadRequest: The object was stored using a form of Server Side ' 'Encryption. 
The correct parameters must be provided to retrieve the object. ' - r'\(bad_request\)' + r'\(bad_request\)', ) b2_tool.should_succeed( [ - 'sync', '--no-progress', '--destination-server-side-encryption', - destination_encryption.mode.value, '--source-server-side-encryption', - source_encryption.mode.value, b2_sync_point, other_b2_sync_point + 'sync', + '--no-progress', + '--destination-server-side-encryption', + destination_encryption.mode.value, + '--source-server-side-encryption', + source_encryption.mode.value, + b2_sync_point, + other_b2_sync_point, ], additional_env={ - 'B2_DESTINATION_SSE_C_KEY_B64': - base64.b64encode(destination_encryption.key.secret).decode(), - 'B2_DESTINATION_SSE_C_KEY_ID': - destination_encryption.key.key_id, - 'B2_SOURCE_SSE_C_KEY_B64': - base64.b64encode(source_encryption.key.secret).decode(), - 'B2_SOURCE_SSE_C_KEY_ID': - source_encryption.key.key_id, - } + 'B2_DESTINATION_SSE_C_KEY_B64': base64.b64encode( + destination_encryption.key.secret + ).decode(), + 'B2_DESTINATION_SSE_C_KEY_ID': destination_encryption.key.key_id, + 'B2_SOURCE_SSE_C_KEY_B64': base64.b64encode(source_encryption.key.secret).decode(), + 'B2_SOURCE_SSE_C_KEY_ID': source_encryption.key.key_id, + }, ) else: @@ -1506,7 +1658,7 @@ def test_sync_long_path(tmp_path, b2_tool, persistent_bucket): 'extremely_long_path_which_exceeds_windows_unfortunate_260_character_path_limit', 'and_needs_special_prefixes_containing_backslashes_added_to_overcome_this_limitation', 'when_doing_so_beware_leaning_toothpick_syndrome_as_it_can_cause_frustration', - 'see_also_xkcd_1638' + 'see_also_xkcd_1638', ) ) @@ -1574,8 +1726,13 @@ def test_sse_b2(b2_tool, persistent_bucket, sample_file, tmp_path, b2_uri_args): subfolder = persistent_bucket.subfolder b2_tool.should_succeed( [ - 'file', 'upload', '--destination-server-side-encryption=SSE-B2', '--quiet', bucket_name, - sample_file, f'{subfolder}/encrypted' + 'file', + 'upload', + '--destination-server-side-encryption=SSE-B2', + '--quiet', + bucket_name, + sample_file, + f'{subfolder}/encrypted', ] ) b2_tool.should_succeed( @@ -1584,14 +1741,20 @@ def test_sse_b2(b2_tool, persistent_bucket, sample_file, tmp_path, b2_uri_args): b2_tool.should_succeed( [ - 'file', 'download', '--quiet', f'b2://{bucket_name}/{subfolder}/encrypted', - tmp_path / 'encrypted' + 'file', + 'download', + '--quiet', + f'b2://{bucket_name}/{subfolder}/encrypted', + tmp_path / 'encrypted', ] ) b2_tool.should_succeed( [ - 'file', 'download', '--quiet', f'b2://{bucket_name}/{subfolder}/not_encrypted', - tmp_path / 'not_encrypted' + 'file', + 'download', + '--quiet', + f'b2://{bucket_name}/{subfolder}/not_encrypted', + tmp_path / 'not_encrypted', ] ) @@ -1599,12 +1762,8 @@ def test_sse_b2(b2_tool, persistent_bucket, sample_file, tmp_path, b2_uri_args): ['ls', '--json', '--recursive', *b2_uri_args(bucket_name, subfolder)] ) should_equal( - [{ - 'algorithm': 'AES256', - 'mode': 'SSE-B2' - }, { - 'mode': 'none' - }], [f['serverSideEncryption'] for f in list_of_files] + [{'algorithm': 'AES256', 'mode': 'SSE-B2'}, {'mode': 'none'}], + [f['serverSideEncryption'] for f in list_of_files], ) encrypted_version = list_of_files[0] @@ -1624,7 +1783,7 @@ def test_sse_b2(b2_tool, persistent_bucket, sample_file, tmp_path, b2_uri_args): 'server-side-copy', '--destination-server-side-encryption=SSE-B2', f"b2id://{encrypted_version['fileId']}", - f"b2://{bucket_name}/{subfolder}/copied_encrypted", + f'b2://{bucket_name}/{subfolder}/copied_encrypted', ] ) b2_tool.should_succeed( @@ -1632,7 +1791,7 @@ def 
test_sse_b2(b2_tool, persistent_bucket, sample_file, tmp_path, b2_uri_args): 'file', 'server-side-copy', f"b2id://{not_encrypted_version['fileId']}", - f"b2://{bucket_name}/{subfolder}/copied_not_encrypted", + f'b2://{bucket_name}/{subfolder}/copied_not_encrypted', ] ) @@ -1640,12 +1799,8 @@ def test_sse_b2(b2_tool, persistent_bucket, sample_file, tmp_path, b2_uri_args): ['ls', '--json', '--recursive', *b2_uri_args(bucket_name, subfolder)] ) should_equal( - [{ - 'algorithm': 'AES256', - 'mode': 'SSE-B2' - }, { - 'mode': 'none' - }] * 2, [f['serverSideEncryption'] for f in list_of_files] + [{'algorithm': 'AES256', 'mode': 'SSE-B2'}, {'mode': 'none'}] * 2, + [f['serverSideEncryption'] for f in list_of_files], ) copied_encrypted_version = list_of_files[2] @@ -1675,54 +1830,82 @@ def test_sse_c( b2_tool.should_fail( [ - 'file', 'upload', '--no-progress', '--quiet', '--destination-server-side-encryption', - 'SSE-C', bucket_name, sample_file, 'gonna-fail-anyway' + 'file', + 'upload', + '--no-progress', + '--quiet', + '--destination-server-side-encryption', + 'SSE-C', + bucket_name, + sample_file, + 'gonna-fail-anyway', ], - 'Using SSE-C requires providing an encryption key via B2_DESTINATION_SSE_C_KEY_B64 env var' + 'Using SSE-C requires providing an encryption key via B2_DESTINATION_SSE_C_KEY_B64 env var', ) file_version_info = b2_tool.should_succeed_json( [ - 'file', 'upload', '--no-progress', '--quiet', '--destination-server-side-encryption', - 'SSE-C', bucket_name, sample_file, f'{subfolder}/uploaded_encrypted' + 'file', + 'upload', + '--no-progress', + '--quiet', + '--destination-server-side-encryption', + 'SSE-C', + bucket_name, + sample_file, + f'{subfolder}/uploaded_encrypted', ], additional_env={ 'B2_DESTINATION_SSE_C_KEY_B64': base64.b64encode(secret).decode(), 'B2_DESTINATION_SSE_C_KEY_ID': sse_c_key_id, - } + }, ) should_equal( { - "algorithm": "AES256", - "customerKey": "******", - "customerKeyMd5": "******", - "mode": "SSE-C" - }, file_version_info['serverSideEncryption'] + 'algorithm': 'AES256', + 'customerKey': '******', + 'customerKeyMd5': '******', + 'mode': 'SSE-C', + }, + file_version_info['serverSideEncryption'], ) should_equal(sse_c_key_id, file_version_info['fileInfo'][SSE_C_KEY_ID_FILE_INFO_KEY_NAME]) b2_tool.should_fail( [ - 'file', 'download', '--quiet', f'b2://{bucket_name}/{subfolder}/uploaded_encrypted', - 'gonna_fail_anyway' + 'file', + 'download', + '--quiet', + f'b2://{bucket_name}/{subfolder}/uploaded_encrypted', + 'gonna_fail_anyway', ], expected_pattern='ERROR: The object was stored using a form of Server Side Encryption. The ' - r'correct parameters must be provided to retrieve the object. \(bad_request\)' + r'correct parameters must be provided to retrieve the object. 
\(bad_request\)', ) b2_tool.should_fail( [ - 'file', 'download', '--quiet', '--source-server-side-encryption', 'SSE-C', - f'b2://{bucket_name}/{subfolder}/uploaded_encrypted', 'gonna_fail_anyway' + 'file', + 'download', + '--quiet', + '--source-server-side-encryption', + 'SSE-C', + f'b2://{bucket_name}/{subfolder}/uploaded_encrypted', + 'gonna_fail_anyway', ], expected_pattern='ValueError: Using SSE-C requires providing an encryption key via ' - 'B2_SOURCE_SSE_C_KEY_B64 env var' + 'B2_SOURCE_SSE_C_KEY_B64 env var', ) b2_tool.should_fail( [ - 'file', 'download', '--quiet', '--source-server-side-encryption', 'SSE-C', - f'b2://{bucket_name}/{subfolder}/uploaded_encrypted', 'gonna_fail_anyway' + 'file', + 'download', + '--quiet', + '--source-server-side-encryption', + 'SSE-C', + f'b2://{bucket_name}/{subfolder}/uploaded_encrypted', + 'gonna_fail_anyway', ], expected_pattern='ERROR: Wrong or no SSE-C key provided when reading a file.', - additional_env={'B2_SOURCE_SSE_C_KEY_B64': base64.b64encode(os.urandom(32)).decode()} + additional_env={'B2_SOURCE_SSE_C_KEY_B64': base64.b64encode(os.urandom(32)).decode()}, ) with contextlib.nullcontext(tmp_path) as dir_path: b2_tool.should_succeed( @@ -1736,7 +1919,7 @@ def test_sse_c( f'b2://{bucket_name}/{subfolder}/uploaded_encrypted', dir_path / 'a', ], - additional_env={'B2_SOURCE_SSE_C_KEY_B64': base64.b64encode(secret).decode()} + additional_env={'B2_SOURCE_SSE_C_KEY_B64': base64.b64encode(secret).decode()}, ) assert read_file(dir_path / 'a') == read_file(sample_file) b2_tool.should_succeed( @@ -1750,36 +1933,43 @@ def test_sse_c( f"b2id://{file_version_info['fileId']}", dir_path / 'b', ], - additional_env={'B2_SOURCE_SSE_C_KEY_B64': base64.b64encode(secret).decode()} + additional_env={'B2_SOURCE_SSE_C_KEY_B64': base64.b64encode(secret).decode()}, ) assert read_file(dir_path / 'b') == read_file(sample_file) b2_tool.should_fail( [ - 'file', 'server-side-copy', f'b2id://{file_version_info["fileId"]}', - f'b2://{bucket_name}/gonna-fail-anyway' + 'file', + 'server-side-copy', + f'b2id://{file_version_info["fileId"]}', + f'b2://{bucket_name}/gonna-fail-anyway', ], - expected_pattern= - 'ERROR: The object was stored using a form of Server Side Encryption. The correct ' - r'parameters must be provided to retrieve the object. \(bad_request\)' + expected_pattern='ERROR: The object was stored using a form of Server Side Encryption. The correct ' + r'parameters must be provided to retrieve the object. 
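The SSE-C tests above never pass key material on the command line; the key travels base64-encoded in environment variables. A sketch of how the tests mint a throwaway key (env var names taken verbatim from the commands above):

    import base64
    import os

    secret = os.urandom(32)  # 256-bit key, same as the tests generate
    sse_c_env = {
        'B2_DESTINATION_SSE_C_KEY_B64': base64.b64encode(secret).decode(),
        'B2_DESTINATION_SSE_C_KEY_ID': 'user-generated-key-id',  # free-form label
    }
    # reading the file back requires the same secret in B2_SOURCE_SSE_C_KEY_B64
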
\(bad_request\)', ) b2_tool.should_fail( [ - 'file', 'server-side-copy', '--source-server-side-encryption=SSE-C', - f'b2id://{file_version_info["fileId"]}', f'b2://{bucket_name}/gonna-fail-anyway' + 'file', + 'server-side-copy', + '--source-server-side-encryption=SSE-C', + f'b2id://{file_version_info["fileId"]}', + f'b2://{bucket_name}/gonna-fail-anyway', ], expected_pattern='ValueError: Using SSE-C requires providing an encryption key via ' - 'B2_SOURCE_SSE_C_KEY_B64 env var' + 'B2_SOURCE_SSE_C_KEY_B64 env var', ) b2_tool.should_fail( [ - 'file', 'server-side-copy', '--source-server-side-encryption=SSE-C', - '--destination-server-side-encryption=SSE-C', f'b2id://{file_version_info["fileId"]}', - f'b2://{bucket_name}/gonna-fail-anyway' + 'file', + 'server-side-copy', + '--source-server-side-encryption=SSE-C', + '--destination-server-side-encryption=SSE-C', + f'b2id://{file_version_info["fileId"]}', + f'b2://{bucket_name}/gonna-fail-anyway', ], expected_pattern='ValueError: Using SSE-C requires providing an encryption key via ' 'B2_DESTINATION_SSE_C_KEY_B64 env var', - additional_env={'B2_SOURCE_SSE_C_KEY_B64': base64.b64encode(secret).decode()} + additional_env={'B2_SOURCE_SSE_C_KEY_B64': base64.b64encode(secret).decode()}, ) b2_tool.should_fail( [ @@ -1790,8 +1980,7 @@ def test_sse_c( f'b2://{bucket_name}/gonna-fail-anyway', ], additional_env={'B2_SOURCE_SSE_C_KEY_B64': base64.b64encode(secret).decode()}, - expected_pattern= - 'Attempting to copy file with metadata while either source or destination uses ' + expected_pattern='Attempting to copy file with metadata while either source or destination uses ' 'SSE-C. Use --fetch-metadata to fetch source file metadata before copying.', ) b2_tool.should_succeed( @@ -1806,7 +1995,7 @@ def test_sse_c( '--content-type', 'text/plain', ], - additional_env={'B2_SOURCE_SSE_C_KEY_B64': base64.b64encode(secret).decode()} + additional_env={'B2_SOURCE_SSE_C_KEY_B64': base64.b64encode(secret).decode()}, ) b2_tool.should_succeed( [ @@ -1819,7 +2008,7 @@ def test_sse_c( '--content-type', 'text/plain', ], - additional_env={'B2_SOURCE_SSE_C_KEY_B64': base64.b64encode(secret).decode()} + additional_env={'B2_SOURCE_SSE_C_KEY_B64': base64.b64encode(secret).decode()}, ) b2_tool.should_succeed( [ @@ -1830,7 +2019,7 @@ def test_sse_c( f'b2://{bucket_name}/{subfolder}/not_encrypted_copied_from_encrypted_metadata_pseudo_copy', '--fetch-metadata', ], - additional_env={'B2_SOURCE_SSE_C_KEY_B64': base64.b64encode(secret).decode()} + additional_env={'B2_SOURCE_SSE_C_KEY_B64': base64.b64encode(secret).decode()}, ) b2_tool.should_succeed( [ @@ -1845,7 +2034,7 @@ def test_sse_c( additional_env={ 'B2_SOURCE_SSE_C_KEY_B64': base64.b64encode(secret).decode(), 'B2_DESTINATION_SSE_C_KEY_B64': base64.b64encode(os.urandom(32)).decode(), - } + }, ) b2_tool.should_succeed( [ @@ -1863,7 +2052,7 @@ def test_sse_c( 'B2_SOURCE_SSE_C_KEY_B64': base64.b64encode(secret).decode(), 'B2_DESTINATION_SSE_C_KEY_B64': base64.b64encode(os.urandom(32)).decode(), 'B2_DESTINATION_SSE_C_KEY_ID': 'another-user-generated-key-id', - } + }, ) b2_tool.should_succeed( [ @@ -1879,7 +2068,7 @@ def test_sse_c( 'B2_SOURCE_SSE_C_KEY_B64': base64.b64encode(secret).decode(), 'B2_DESTINATION_SSE_C_KEY_B64': base64.b64encode(os.urandom(32)).decode(), 'B2_DESTINATION_SSE_C_KEY_ID': 'another-user-generated-key-id', - } + }, ) list_of_files = b2_tool.should_succeed_json( ['ls', '--json', '--recursive', *b2_uri_args(bucket_name, subfolder)] @@ -1890,45 +2079,36 @@ def test_sse_c( { 'file_name': 
f'{subfolder}/encrypted_no_id_copied_from_encrypted', 'sse_c_key_id': 'missing_key', - 'serverSideEncryption': - { - "algorithm": "AES256", - "customerKey": "******", - "customerKeyMd5": "******", - "mode": "SSE-C" - }, + 'serverSideEncryption': { + 'algorithm': 'AES256', + 'customerKey': '******', + 'customerKeyMd5': '******', + 'mode': 'SSE-C', + }, }, { - 'file_name': - f'{subfolder}/encrypted_with_id_copied_from_encrypted_metadata_pseudo_copy', - 'sse_c_key_id': - 'another-user-generated-key-id', - 'serverSideEncryption': - { - 'algorithm': 'AES256', - "customerKey": "******", - "customerKeyMd5": "******", - 'mode': 'SSE-C', - }, + 'file_name': f'{subfolder}/encrypted_with_id_copied_from_encrypted_metadata_pseudo_copy', + 'sse_c_key_id': 'another-user-generated-key-id', + 'serverSideEncryption': { + 'algorithm': 'AES256', + 'customerKey': '******', + 'customerKeyMd5': '******', + 'mode': 'SSE-C', + }, }, { - 'file_name': - f'{subfolder}/encrypted_with_id_copied_from_encrypted_metadata_replace', - 'sse_c_key_id': - 'another-user-generated-key-id', - 'serverSideEncryption': - { - 'algorithm': 'AES256', - "customerKey": "******", - "customerKeyMd5": "******", - 'mode': 'SSE-C', - }, + 'file_name': f'{subfolder}/encrypted_with_id_copied_from_encrypted_metadata_replace', + 'sse_c_key_id': 'another-user-generated-key-id', + 'serverSideEncryption': { + 'algorithm': 'AES256', + 'customerKey': '******', + 'customerKeyMd5': '******', + 'mode': 'SSE-C', + }, }, { - 'file_name': - f'{subfolder}/not_encrypted_copied_from_encrypted_metadata_pseudo_copy', - 'sse_c_key_id': - 'missing_key', + 'file_name': f'{subfolder}/not_encrypted_copied_from_encrypted_metadata_pseudo_copy', + 'sse_c_key_id': 'missing_key', 'serverSideEncryption': { 'mode': 'none', }, @@ -1941,10 +2121,8 @@ def test_sse_c( }, }, { - 'file_name': - f'{subfolder}/not_encrypted_copied_from_encrypted_metadata_replace_empty', - 'sse_c_key_id': - 'missing_key', + 'file_name': f'{subfolder}/not_encrypted_copied_from_encrypted_metadata_replace_empty', + 'sse_c_key_id': 'missing_key', 'serverSideEncryption': { 'mode': 'none', }, @@ -1952,36 +2130,35 @@ def test_sse_c( { 'file_name': f'{subfolder}/uploaded_encrypted', 'sse_c_key_id': sse_c_key_id, - 'serverSideEncryption': - { - "algorithm": "AES256", - "customerKey": "******", - "customerKeyMd5": "******", - "mode": "SSE-C" - }, + 'serverSideEncryption': { + 'algorithm': 'AES256', + 'customerKey': '******', + 'customerKeyMd5': '******', + 'mode': 'SSE-C', + }, }, ], sorted( [ { - 'sse_c_key_id': - f['fileInfo'].get(SSE_C_KEY_ID_FILE_INFO_KEY_NAME, 'missing_key'), - 'serverSideEncryption': - f['serverSideEncryption'], - 'file_name': - f['fileName'] - } for f in list_of_files + 'sse_c_key_id': f['fileInfo'].get( + SSE_C_KEY_ID_FILE_INFO_KEY_NAME, 'missing_key' + ), + 'serverSideEncryption': f['serverSideEncryption'], + 'file_name': f['fileName'], + } + for f in list_of_files ], - key=lambda r: r['file_name'] - ) + key=lambda r: r['file_name'], + ), ) @pytest.mark.skipif( (sys.version_info.major, sys.version_info.minor) < (3, 9), reason="License extraction doesn't work on older versions, and we're only " - "obliged to provide this " - "data in bundled and built packages." 
+ 'obliged to provide this ' + 'data in bundled and built packages.', ) @pytest.mark.parametrize('with_packages', [True, False]) def test_license(b2_tool, with_packages, cli_version): @@ -1995,34 +2172,36 @@ def test_license(b2_tool, with_packages, cli_version): # Thus, I'm allowing here for the test of licenses to pass whenever # the binary is named `b2` or with the proper cli version string (e.g. `_b2v4` or `b2v3`). full_license_re = re.compile( - fr'Licenses of all modules used by ({cli_version}|b2)(\.EXE)?, shipped with it in binary form:\r?\n' + rf'Licenses of all modules used by ({cli_version}|b2)(\.EXE)?, shipped with it in binary form:\r?\n' r'\+-*\+-*\+\r?\n' r'\|\s*Module name\s*\|\s*License text\s*\|\r?\n' r'.*' - r'\+-*\+-*\+\r?\n', re.MULTILINE + re.DOTALL + r'\+-*\+-*\+\r?\n', + re.MULTILINE + re.DOTALL, ) full_license_text = next(full_license_re.finditer(license_text), None) assert full_license_text, license_text - assert len( - full_license_text.group(0) - ) > 140_000 # we should know if the length of this block changes dramatically + assert ( + len(full_license_text.group(0)) > 140_000 + ) # we should know if the length of this block changes dramatically # Note that GitHub CI adds additional packages: # 'colorlog', 'virtualenv', 'nox', 'packaging', 'argcomplete', 'filelock' # that sum up to around 50k characters. Tests ran from docker image are unaffected. # See the explanation above for why both `b2` and `cli_version` are allowed here. license_summary_re = re.compile( - fr'Summary of all modules used by ({cli_version}|b2)(\.EXE)?, shipped with it in binary form:\r?\n' + rf'Summary of all modules used by ({cli_version}|b2)(\.EXE)?, shipped with it in binary form:\r?\n' r'\+-*\+-*\+-*\+-*\+-*\+\r?\n' r'\|\s*Module name\s*\|\s*Version\s*\|\s*License\s*\|\s*Author\s*\|\s*URL\s*\|\r?\n' r'.*' - r'\+-*\+-*\+-*\+-*\+-*\+\r?\n', re.MULTILINE + re.DOTALL + r'\+-*\+-*\+-*\+-*\+-*\+\r?\n', + re.MULTILINE + re.DOTALL, ) license_summary_text = next(license_summary_re.finditer(license_text), None) assert license_summary_text, license_text - assert len( - license_summary_text.group(0) - ) > 6_300 # we should know if the length of this block changes dramatically + assert ( + len(license_summary_text.group(0)) > 6_300 + ) # we should know if the length of this block changes dramatically assert """ license: Backblaze wants developers and organization to copy and re-use our @@ -2057,8 +2236,12 @@ def test_license(b2_tool, with_packages, cli_version): def test_file_lock( - b2_tool, application_key_id, application_key, sample_file, bucket_factory, - schedule_bucket_cleanup + b2_tool, + application_key_id, + application_key, + sample_file, + bucket_factory, + schedule_bucket_cleanup, ): lock_disabled_bucket_name = bucket_factory(bucket_type='allPrivate').name @@ -2072,7 +2255,7 @@ def test_file_lock( b2_tool, not_lockable_file['fileId'], retention_mode=RetentionMode.NONE, - legal_hold=LegalHold.UNSET + legal_hold=LegalHold.UNSET, ) b2_tool.should_fail( @@ -2089,20 +2272,33 @@ def test_file_lock( str(now_millis + 1.5 * ONE_HOUR_MILLIS), '--legal-hold', 'on', - ], r'ERROR: The bucket is not file lock enabled \(bucket_missing_file_lock\)' + ], + r'ERROR: The bucket is not file lock enabled \(bucket_missing_file_lock\)', ) b2_tool.should_fail( [ - 'bucket', 'update', lock_disabled_bucket_name, 'allPrivate', '--default-retention-mode', - 'compliance' - ], 'ValueError: must specify period for retention mode RetentionMode.COMPLIANCE' + 'bucket', + 'update', + lock_disabled_bucket_name, + 
'allPrivate', + '--default-retention-mode', + 'compliance', + ], + 'ValueError: must specify period for retention mode RetentionMode.COMPLIANCE', ) b2_tool.should_fail( [ - 'bucket', 'update', lock_disabled_bucket_name, 'allPrivate', '--default-retention-mode', - 'compliance', '--default-retention-period', '7 days' - ], r'ERROR: The bucket is not file lock enabled \(bucket_missing_file_lock\)' + 'bucket', + 'update', + lock_disabled_bucket_name, + 'allPrivate', + '--default-retention-mode', + 'compliance', + '--default-retention-period', + '7 days', + ], + r'ERROR: The bucket is not file lock enabled \(bucket_missing_file_lock\)', ) lock_enabled_bucket_name = b2_tool.generate_bucket_name() schedule_bucket_cleanup(lock_enabled_bucket_name) @@ -2143,10 +2339,14 @@ def test_file_lock( # deprecated command b2_tool.should_fail( [ - 'update-file-retention', not_lockable_file['fileName'], not_lockable_file['fileId'], - 'governance', '--retain-until', - str(now_millis + ONE_DAY_MILLIS + ONE_HOUR_MILLIS) - ], r'ERROR: The bucket is not file lock enabled \(bucket_missing_file_lock\)' + 'update-file-retention', + not_lockable_file['fileName'], + not_lockable_file['fileId'], + 'governance', + '--retain-until', + str(now_millis + ONE_DAY_MILLIS + ONE_HOUR_MILLIS), + ], + r'ERROR: The bucket is not file lock enabled \(bucket_missing_file_lock\)', ) # deprecated command @@ -2156,8 +2356,14 @@ def test_file_lock( ) ) b2_tool.should_succeed( # first let's try with a file name - ['update-file-retention', lockable_file['fileName'], lockable_file['fileId'], 'governance', - '--retain-until', str(now_millis + ONE_DAY_MILLIS + ONE_HOUR_MILLIS)], + [ + 'update-file-retention', + lockable_file['fileName'], + lockable_file['fileId'], + 'governance', + '--retain-until', + str(now_millis + ONE_DAY_MILLIS + ONE_HOUR_MILLIS), + ], expected_stderr_pattern=update_file_retention_deprecated_pattern, ) @@ -2168,33 +2374,51 @@ def test_file_lock( b2_tool, lockable_file['fileId'], retention_mode=RetentionMode.GOVERNANCE, - retain_until=now_millis + ONE_DAY_MILLIS + ONE_HOUR_MILLIS + retain_until=now_millis + ONE_DAY_MILLIS + ONE_HOUR_MILLIS, ) b2_tool.should_succeed( # and now without a file name - ['file', 'update', '--file-retention-mode', 'governance', - '--retain-until', str(now_millis + ONE_DAY_MILLIS + 2 * ONE_HOUR_MILLIS), lockable_b2uri], + [ + 'file', + 'update', + '--file-retention-mode', + 'governance', + '--retain-until', + str(now_millis + ONE_DAY_MILLIS + 2 * ONE_HOUR_MILLIS), + lockable_b2uri, + ], ) _assert_file_lock_configuration( b2_tool, lockable_file['fileId'], retention_mode=RetentionMode.GOVERNANCE, - retain_until=now_millis + ONE_DAY_MILLIS + 2 * ONE_HOUR_MILLIS + retain_until=now_millis + ONE_DAY_MILLIS + 2 * ONE_HOUR_MILLIS, ) b2_tool.should_fail( [ - 'file', 'update', '--file-retention-mode', 'governance', '--retain-until', - str(now_millis + ONE_HOUR_MILLIS), lockable_b2uri + 'file', + 'update', + '--file-retention-mode', + 'governance', + '--retain-until', + str(now_millis + ONE_HOUR_MILLIS), + lockable_b2uri, ], "ERROR: Auth token not authorized to write retention or file already in 'compliance' mode or " - "bypassGovernance=true parameter missing", + 'bypassGovernance=true parameter missing', ) b2_tool.should_succeed( [ - 'file', 'update', '--file-retention-mode', 'governance', '--retain-until', - str(now_millis + ONE_HOUR_MILLIS), '--bypass-governance', lockable_b2uri + 'file', + 'update', + '--file-retention-mode', + 'governance', + '--retain-until', + str(now_millis + ONE_HOUR_MILLIS), + 
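Note the unit convention in these file-lock calls: --retain-until takes an absolute timestamp in epoch milliseconds, built from "now" plus an offset. A sketch, assuming current_time_millis() is the obvious time.time()-based helper:

    import time

    ONE_HOUR_MILLIS = 60 * 60 * 1000
    ONE_DAY_MILLIS = 24 * ONE_HOUR_MILLIS

    def current_time_millis() -> int:  # assumed equivalent of the test helper
        return int(time.time() * 1000)

    retain_until = str(current_time_millis() + ONE_DAY_MILLIS + ONE_HOUR_MILLIS)
    # passed as: file update --file-retention-mode governance --retain-until <retain_until>
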
'--bypass-governance', + lockable_b2uri, ], ) @@ -2202,13 +2426,13 @@ def test_file_lock( b2_tool, lockable_file['fileId'], retention_mode=RetentionMode.GOVERNANCE, - retain_until=now_millis + ONE_HOUR_MILLIS + retain_until=now_millis + ONE_HOUR_MILLIS, ) b2_tool.should_fail( ['file', 'update', '--file-retention-mode', 'none', lockable_b2uri], "ERROR: Auth token not authorized to write retention or file already in 'compliance' mode or " - "bypassGovernance=true parameter missing", + 'bypassGovernance=true parameter missing', ) b2_tool.should_succeed( ['file', 'update', '--file-retention-mode', 'none', '--bypass-governance', lockable_b2uri], @@ -2220,7 +2444,7 @@ def test_file_lock( b2_tool.should_fail( ['file', 'update', '--legal-hold', 'on', not_lockable_b2uri], - r'ERROR: The bucket is not file lock enabled \(bucket_missing_file_lock\)' + r'ERROR: The bucket is not file lock enabled \(bucket_missing_file_lock\)', ) # deprecated command @@ -2294,7 +2518,7 @@ def test_file_lock( uploaded_file['fileId'], retention_mode=RetentionMode.GOVERNANCE, retain_until=now_millis + 1.5 * ONE_HOUR_MILLIS, - legal_hold=LegalHold.ON + legal_hold=LegalHold.ON, ) b2_tool.should_fail( @@ -2309,7 +2533,8 @@ def test_file_lock( str(now_millis + 1.25 * ONE_HOUR_MILLIS), '--legal-hold', 'off', - ], r'ERROR: The bucket is not file lock enabled \(bucket_missing_file_lock\)' + ], + r'ERROR: The bucket is not file lock enabled \(bucket_missing_file_lock\)', ) copied_file = b2_tool.should_succeed_json( @@ -2332,14 +2557,18 @@ def test_file_lock( copied_file['fileId'], retention_mode=RetentionMode.GOVERNANCE, retain_until=now_millis + 1.25 * ONE_HOUR_MILLIS, - legal_hold=LegalHold.OFF + legal_hold=LegalHold.OFF, ) lock_disabled_key_id, lock_disabled_key = make_lock_disabled_key(b2_tool) b2_tool.should_succeed( [ - 'account', 'authorize', '--environment', b2_tool.realm, lock_disabled_key_id, - lock_disabled_key + 'account', + 'authorize', + '--environment', + b2_tool.realm, + lock_disabled_key_id, + lock_disabled_key, ], ) @@ -2351,13 +2580,17 @@ def test_file_lock( not_lockable_file['fileId'], lockable_b2uri, not_lockable_b2uri, - sample_file=sample_file + sample_file=sample_file, ) b2_tool.should_succeed( [ - 'account', 'authorize', '--environment', b2_tool.realm, application_key_id, - application_key + 'account', + 'authorize', + '--environment', + b2_tool.realm, + application_key_id, + application_key, ], ) @@ -2381,14 +2614,25 @@ def make_lock_disabled_key(b2_tool): def file_lock_without_perms_test( - b2_tool, lock_enabled_bucket_name, lock_disabled_bucket_name, lockable_file_id, - not_lockable_file_id, lockable_b2uri, not_lockable_b2uri, sample_file + b2_tool, + lock_enabled_bucket_name, + lock_disabled_bucket_name, + lockable_file_id, + not_lockable_file_id, + lockable_b2uri, + not_lockable_b2uri, + sample_file, ): - b2_tool.should_fail( [ - 'bucket', 'update', lock_enabled_bucket_name, 'allPrivate', '--default-retention-mode', - 'governance', '--default-retention-period', '1 days' + 'bucket', + 'update', + lock_enabled_bucket_name, + 'allPrivate', + '--default-retention-mode', + 'governance', + '--default-retention-period', + '1 days', ], 'ERROR: unauthorized for application key with capabilities', ) @@ -2397,37 +2641,47 @@ def file_lock_without_perms_test( b2_tool, lockable_file_id, retention_mode=RetentionMode.UNKNOWN, - legal_hold=LegalHold.UNKNOWN + legal_hold=LegalHold.UNKNOWN, ) b2_tool.should_fail( [ - 'file', 'update', '--file-retention-mode', 'governance', '--retain-until', - str(current_time_millis() 
+ 7 * ONE_DAY_MILLIS), lockable_b2uri + 'file', + 'update', + '--file-retention-mode', + 'governance', + '--retain-until', + str(current_time_millis() + 7 * ONE_DAY_MILLIS), + lockable_b2uri, ], "ERROR: Auth token not authorized to write retention or file already in 'compliance' mode or " - "bypassGovernance=true parameter missing", + 'bypassGovernance=true parameter missing', ) b2_tool.should_fail( [ - 'file', 'update', '--file-retention-mode', 'governance', '--retain-until', - str(current_time_millis() + 7 * ONE_DAY_MILLIS), not_lockable_b2uri + 'file', + 'update', + '--file-retention-mode', + 'governance', + '--retain-until', + str(current_time_millis() + 7 * ONE_DAY_MILLIS), + not_lockable_b2uri, ], "ERROR: Auth token not authorized to write retention or file already in 'compliance' mode or " - "bypassGovernance=true parameter missing", + 'bypassGovernance=true parameter missing', ) b2_tool.should_fail( ['file', 'update', '--legal-hold', 'on', lockable_b2uri], "ERROR: Auth token not authorized to write retention or file already in 'compliance' mode or " - "bypassGovernance=true parameter missing", + 'bypassGovernance=true parameter missing', ) b2_tool.should_fail( ['file', 'update', '--legal-hold', 'on', not_lockable_b2uri], "ERROR: Auth token not authorized to write retention or file already in 'compliance' mode or " - "bypassGovernance=true parameter missing", + 'bypassGovernance=true parameter missing', ) b2_tool.should_fail( @@ -2446,7 +2700,7 @@ def file_lock_without_perms_test( '--legal-hold', 'on', ], - "unauthorized for application key with capabilities", + 'unauthorized for application key with capabilities', ) b2_tool.should_fail( @@ -2465,7 +2719,7 @@ def file_lock_without_perms_test( '--legal-hold', 'on', ], - "unauthorized for application key with capabilities", + 'unauthorized for application key with capabilities', ) b2_tool.should_fail( @@ -2529,32 +2783,41 @@ def deleting_locked_files( locked_file['fileName'], locked_file['fileId'], ], - "ERROR: Access Denied for application key " + 'ERROR: Access Denied for application key ', ) - b2_tool.should_succeed([ # master key + b2_tool.should_succeed( + [ # master key 'delete-file-version', locked_file['fileName'], locked_file['fileId'], - '--bypass-governance' - ], expected_stderr_pattern=re.compile(re.escape( - 'WARNING: `delete-file-version` command is deprecated. Use `rm` instead.' - )) + '--bypass-governance', + ], + expected_stderr_pattern=re.compile( + re.escape('WARNING: `delete-file-version` command is deprecated. 
Use `rm` instead.') + ), ) locked_file = upload_locked_file(b2_tool, lock_enabled_bucket_name, sample_file) b2_tool.should_succeed( [ - 'account', 'authorize', '--environment', b2_tool.realm, lock_disabled_key_id, - lock_disabled_key + 'account', + 'authorize', + '--environment', + b2_tool.realm, + lock_disabled_key_id, + lock_disabled_key, ], ) - b2_tool.should_fail([ # lock disabled key - 'delete-file-version', - locked_file['fileName'], - locked_file['fileId'], - '--bypass-governance', - ], "ERROR: unauthorized for application key with capabilities '") + b2_tool.should_fail( + [ # lock disabled key + 'delete-file-version', + locked_file['fileName'], + locked_file['fileId'], + '--bypass-governance', + ], + "ERROR: unauthorized for application key with capabilities '", + ) @pytest.mark.apiver(from_ver=4) @@ -2597,7 +2860,7 @@ def test_deleting_locked_files_v4(b2_tool, sample_file, schedule_bucket_cleanup) 'rm', f"b2id://{locked_file['fileId']}", ], - " failed: Access Denied for application key " + ' failed: Access Denied for application key ', ) b2_tool.should_succeed( [ # master key @@ -2612,8 +2875,12 @@ def test_deleting_locked_files_v4(b2_tool, sample_file, schedule_bucket_cleanup) lock_disabled_key_id, lock_disabled_key = make_lock_disabled_key(b2_tool) b2_tool.should_succeed( [ - 'account', 'authorize', '--environment', b2_tool.realm, lock_disabled_key_id, - lock_disabled_key + 'account', + 'authorize', + '--environment', + b2_tool.realm, + lock_disabled_key_id, + lock_disabled_key, ], ) @@ -2623,7 +2890,7 @@ def test_deleting_locked_files_v4(b2_tool, sample_file, schedule_bucket_cleanup) '--bypass-governance', f"b2id://{locked_file['fileId']}", ], - " failed: unauthorized for application key with capabilities '" + " failed: unauthorized for application key with capabilities '", ) @@ -2673,8 +2940,9 @@ def test_profile_switch(b2_tool): account_info = b2_tool.should_succeed_json(['account', 'get', '--profile', profile]) account_file_path = account_info['accountFilePath'] - assert profile in account_file_path, \ - f'accountFilePath "{account_file_path}" should contain profile name "{profile}"' + assert ( + profile in account_file_path + ), f'accountFilePath "{account_file_path}" should contain profile name "{profile}"' b2_tool.should_succeed(['account', 'clear', '--profile', profile]) b2_tool.should_fail( @@ -2742,34 +3010,34 @@ def test_replication_basic(b2_tool, bucket_name, schedule_bucket_cleanup): # test that destination bucket is registered as replication destination assert destination_bucket['replication'].get('asReplicationSource') is None - assert destination_bucket['replication' - ]['asReplicationDestination' - ] == destination_replication_configuration['asReplicationDestination'] + assert ( + destination_bucket['replication']['asReplicationDestination'] + == destination_replication_configuration['asReplicationDestination'] + ) # ---------------- set up replication source ---------------- source_replication_configuration = { - "asReplicationSource": - { - "replicationRules": - [ - { - "destinationBucketId": destination_bucket['bucketId'], - "fileNamePrefix": "one/", - "includeExistingFiles": False, - "isEnabled": True, - "priority": 1, - "replicationRuleName": "replication-one" - }, { - "destinationBucketId": destination_bucket['bucketId'], - "fileNamePrefix": "two/", - "includeExistingFiles": False, - "isEnabled": True, - "priority": 2, - "replicationRuleName": "replication-two" - } - ], - "sourceApplicationKeyId": key_one_id, - }, + 'asReplicationSource': { + 
'replicationRules': [ + { + 'destinationBucketId': destination_bucket['bucketId'], + 'fileNamePrefix': 'one/', + 'includeExistingFiles': False, + 'isEnabled': True, + 'priority': 1, + 'replicationRuleName': 'replication-one', + }, + { + 'destinationBucketId': destination_bucket['bucketId'], + 'fileNamePrefix': 'two/', + 'includeExistingFiles': False, + 'isEnabled': True, + 'priority': 2, + 'replicationRuleName': 'replication-two', + }, + ], + 'sourceApplicationKeyId': key_one_id, + }, } source_replication_configuration_json = json.dumps(source_replication_configuration) @@ -2790,8 +3058,10 @@ def test_replication_basic(b2_tool, bucket_name, schedule_bucket_cleanup): source_bucket = b2_tool.should_succeed_json(['bucket', 'get', source_bucket_name]) # test that all replication rules are present in source bucket - assert source_bucket['replication']['asReplicationSource' - ] == source_replication_configuration['asReplicationSource'] + assert ( + source_bucket['replication']['asReplicationSource'] + == source_replication_configuration['asReplicationSource'] + ) # test that source bucket is not mentioned as replication destination assert source_bucket['replication'].get('asReplicationDestination') is None @@ -2799,7 +3069,7 @@ def test_replication_basic(b2_tool, bucket_name, schedule_bucket_cleanup): # ---------------- attempt enabling object lock ---------------- b2_tool.should_fail( ['bucket', 'update', source_bucket_name, '--file-lock-enabled'], - 'ERROR: Operation not supported for buckets with source replication' + 'ERROR: Operation not supported for buckets with source replication', ) # ---------------- remove replication source ---------------- @@ -2811,15 +3081,19 @@ def test_replication_basic(b2_tool, bucket_name, schedule_bucket_cleanup): no_replication_configuration_json = json.dumps(no_replication_configuration) source_bucket = b2_tool.should_succeed_json( [ - 'bucket', 'update', source_bucket_name, 'allPublic', '--replication', - no_replication_configuration_json + 'bucket', + 'update', + source_bucket_name, + 'allPublic', + '--replication', + no_replication_configuration_json, ] ) # test that source bucket replication is removed assert source_bucket['replication'] == { 'asReplicationDestination': None, - 'asReplicationSource': None + 'asReplicationSource': None, } # ---------------- remove replication destination ---------------- @@ -2838,7 +3112,7 @@ def test_replication_basic(b2_tool, bucket_name, schedule_bucket_cleanup): # test that destination bucket replication is removed assert destination_bucket['replication'] == { 'asReplicationDestination': None, - 'asReplicationSource': None + 'asReplicationSource': None, } b2_tool.should_succeed(['key', 'delete', key_one_id]) @@ -2860,7 +3134,9 @@ def base_test_replication_setup(b2_tool, bucket_name, schedule_bucket_cleanup, u 'WARNING: `replication-setup` command is deprecated. Use `replication setup` instead.' 
) ) - replication_setup_expected_stderr_pattern = None if use_subcommands else replication_setup_deprecated_pattern + replication_setup_expected_stderr_pattern = ( + None if use_subcommands else replication_setup_deprecated_pattern + ) source_bucket_name = b2_tool.generate_bucket_name() schedule_bucket_cleanup(source_bucket_name) @@ -2877,7 +3153,7 @@ def base_test_replication_setup(b2_tool, bucket_name, schedule_bucket_cleanup, u destination_bucket_name = bucket_name b2_tool.should_succeed( [*setup_cmd, source_bucket_name, destination_bucket_name], - expected_stderr_pattern=replication_setup_expected_stderr_pattern + expected_stderr_pattern=replication_setup_expected_stderr_pattern, ) destination_bucket_old = b2_tool.should_succeed_json(['bucket', 'get', destination_bucket_name]) @@ -2899,34 +3175,39 @@ def base_test_replication_setup(b2_tool, bucket_name, schedule_bucket_cleanup, u destination_bucket = b2_tool.should_succeed_json(['bucket', 'get', destination_bucket_name]) assert source_bucket['replication']['asReplicationSource']['replicationRules'] == [ { - "destinationBucketId": destination_bucket['bucketId'], - "fileNamePrefix": "", - "includeExistingFiles": False, - "isEnabled": True, - "priority": 128, - "replicationRuleName": destination_bucket['bucketName'], + 'destinationBucketId': destination_bucket['bucketId'], + 'fileNamePrefix': '', + 'includeExistingFiles': False, + 'isEnabled': True, + 'priority': 128, + 'replicationRuleName': destination_bucket['bucketName'], }, { - "destinationBucketId": destination_bucket['bucketId'], - "fileNamePrefix": "foo", - "includeExistingFiles": False, - "isEnabled": True, - "priority": 132, - "replicationRuleName": "my-replication-rule", + 'destinationBucketId': destination_bucket['bucketId'], + 'fileNamePrefix': 'foo', + 'includeExistingFiles': False, + 'isEnabled': True, + 'priority': 132, + 'replicationRuleName': 'my-replication-rule', }, ] for key_one_id, key_two_id in destination_bucket['replication']['asReplicationDestination'][ - 'sourceToDestinationKeyMapping'].items(): + 'sourceToDestinationKeyMapping' + ].items(): b2_tool.should_succeed(['key', 'delete', key_one_id]) b2_tool.should_succeed(['key', 'delete', key_two_id]) - assert destination_bucket_old['replication']['asReplicationDestination'][ - 'sourceToDestinationKeyMapping'] == destination_bucket['replication'][ - 'asReplicationDestination']['sourceToDestinationKeyMapping'] + assert ( + destination_bucket_old['replication']['asReplicationDestination'][ + 'sourceToDestinationKeyMapping' + ] + == destination_bucket['replication']['asReplicationDestination'][ + 'sourceToDestinationKeyMapping' + ] + ) def test_replication_monitoring(b2_tool, bucket_name, sample_file, schedule_bucket_cleanup): - # ---------------- set up keys ---------------- key_one_name = 'clt-testKey-01' + random_hex(6) created_key_stdout = b2_tool.should_succeed( @@ -2981,28 +3262,27 @@ def test_replication_monitoring(b2_tool, bucket_name, sample_file, schedule_buck # ---------------- set up replication source ---------------- source_replication_configuration = { - "asReplicationSource": - { - "replicationRules": - [ - { - "destinationBucketId": destination_bucket['bucketId'], - "fileNamePrefix": "one/", - "includeExistingFiles": False, - "isEnabled": True, - "priority": 1, - "replicationRuleName": "replication-one" - }, { - "destinationBucketId": destination_bucket['bucketId'], - "fileNamePrefix": "two/", - "includeExistingFiles": False, - "isEnabled": True, - "priority": 2, - "replicationRuleName": 
"replication-two" - } - ], - "sourceApplicationKeyId": key_one_id, - }, + 'asReplicationSource': { + 'replicationRules': [ + { + 'destinationBucketId': destination_bucket['bucketId'], + 'fileNamePrefix': 'one/', + 'includeExistingFiles': False, + 'isEnabled': True, + 'priority': 1, + 'replicationRuleName': 'replication-one', + }, + { + 'destinationBucketId': destination_bucket['bucketId'], + 'fileNamePrefix': 'two/', + 'includeExistingFiles': False, + 'isEnabled': True, + 'priority': 2, + 'replicationRuleName': 'replication-two', + }, + ], + 'sourceApplicationKeyId': key_one_id, + }, } source_replication_configuration_json = json.dumps(source_replication_configuration) @@ -3044,8 +3324,8 @@ def test_replication_monitoring(b2_tool, bucket_name, sample_file, schedule_buck upload_encryption_args = ['--destination-server-side-encryption', 'SSE-B2'] upload_additional_env = {} b2_tool.should_succeed_json( - ['file', 'upload', '--quiet', source_bucket_name, sample_file, 'two/c'] + - upload_encryption_args, + ['file', 'upload', '--quiet', source_bucket_name, sample_file, 'two/c'] + + upload_encryption_args, additional_env=upload_additional_env, ) @@ -3056,8 +3336,8 @@ def test_replication_monitoring(b2_tool, bucket_name, sample_file, schedule_buck 'B2_DESTINATION_SSE_C_KEY_ID': SSE_C_AES.key.key_id, } b2_tool.should_succeed_json( - ['file', 'upload', '--quiet', source_bucket_name, sample_file, 'two/d'] + - upload_encryption_args, + ['file', 'upload', '--quiet', source_bucket_name, sample_file, 'two/d'] + + upload_encryption_args, additional_env=upload_additional_env, ) @@ -3072,7 +3352,8 @@ def test_replication_monitoring(b2_tool, bucket_name, sample_file, schedule_buck 'two/e', '--legal-hold', 'on', - ] + upload_encryption_args, + ] + + upload_encryption_args, additional_env=upload_additional_env, ) @@ -3081,7 +3362,7 @@ def test_replication_monitoring(b2_tool, bucket_name, sample_file, schedule_buck ['delete-file-version', uploaded_a['fileName'], uploaded_a['fileId']], expected_stderr_pattern=re.compile( re.escape('WARNING: `delete-file-version` command is deprecated. 
Use `rm` instead.') - ) + ), ) # run stats command @@ -3101,7 +3382,7 @@ def test_replication_monitoring(b2_tool, bucket_name, sample_file, schedule_buck 'json', source_bucket_name, ], - expected_stderr_pattern=replication_status_deprecated_pattern + expected_stderr_pattern=replication_status_deprecated_pattern, ) replication_status_json = b2_tool.should_succeed_json( @@ -3120,70 +3401,72 @@ def test_replication_monitoring(b2_tool, bucket_name, sample_file, schedule_buck assert replication_status_json in [ { - "replication-one": - [ - { - "count": 1, - "destination_replication_status": None, - "hash_differs": None, - "metadata_differs": None, - "source_has_file_retention": None, - "source_has_hide_marker": None, - "source_has_large_metadata": None, - "source_has_legal_hold": None, - "source_encryption_mode": None, - "source_replication_status": None, - } - ], - "replication-two": - [ - { - "count": 1, - "destination_replication_status": None, - "hash_differs": None, - "metadata_differs": None, - "source_has_file_retention": False, - "source_has_hide_marker": False, - "source_has_large_metadata": False, - "source_has_legal_hold": True, - "source_encryption_mode": 'none', - "source_replication_status": first, - }, { - "count": 1, - "destination_replication_status": None, - "hash_differs": None, - "metadata_differs": None, - "source_has_file_retention": False, - "source_has_hide_marker": False, - "source_has_large_metadata": False, - "source_has_legal_hold": False, - "source_encryption_mode": 'SSE-B2', - "source_replication_status": second, - }, { - "count": 1, - "destination_replication_status": None, - "hash_differs": None, - "metadata_differs": None, - "source_has_file_retention": False, - "source_has_hide_marker": False, - "source_has_large_metadata": False, - "source_has_legal_hold": False, - "source_encryption_mode": 'SSE-C', - "source_replication_status": None, - }, { - "count": 1, - "destination_replication_status": None, - "hash_differs": None, - "metadata_differs": None, - "source_has_file_retention": False, - "source_has_hide_marker": False, - "source_has_large_metadata": False, - "source_has_legal_hold": True, - "source_encryption_mode": 'SSE-C', - "source_replication_status": None, - } - ] - } for first, second in itertools.product(['FAILED', 'PENDING'], ['FAILED', 'PENDING']) + 'replication-one': [ + { + 'count': 1, + 'destination_replication_status': None, + 'hash_differs': None, + 'metadata_differs': None, + 'source_has_file_retention': None, + 'source_has_hide_marker': None, + 'source_has_large_metadata': None, + 'source_has_legal_hold': None, + 'source_encryption_mode': None, + 'source_replication_status': None, + } + ], + 'replication-two': [ + { + 'count': 1, + 'destination_replication_status': None, + 'hash_differs': None, + 'metadata_differs': None, + 'source_has_file_retention': False, + 'source_has_hide_marker': False, + 'source_has_large_metadata': False, + 'source_has_legal_hold': True, + 'source_encryption_mode': 'none', + 'source_replication_status': first, + }, + { + 'count': 1, + 'destination_replication_status': None, + 'hash_differs': None, + 'metadata_differs': None, + 'source_has_file_retention': False, + 'source_has_hide_marker': False, + 'source_has_large_metadata': False, + 'source_has_legal_hold': False, + 'source_encryption_mode': 'SSE-B2', + 'source_replication_status': second, + }, + { + 'count': 1, + 'destination_replication_status': None, + 'hash_differs': None, + 'metadata_differs': None, + 'source_has_file_retention': False, + 
'source_has_hide_marker': False, + 'source_has_large_metadata': False, + 'source_has_legal_hold': False, + 'source_encryption_mode': 'SSE-C', + 'source_replication_status': None, + }, + { + 'count': 1, + 'destination_replication_status': None, + 'hash_differs': None, + 'metadata_differs': None, + 'source_has_file_retention': False, + 'source_has_hide_marker': False, + 'source_has_large_metadata': False, + 'source_has_legal_hold': True, + 'source_encryption_mode': 'SSE-C', + 'source_replication_status': None, + }, + ], + } + for first, second in itertools.product(['FAILED', 'PENDING'], ['FAILED', 'PENDING']) ] @@ -3194,8 +3477,13 @@ def test_enable_file_lock_first_retention_second(b2_tool, bucket_name): # set retention with file lock already enabled b2_tool.should_succeed( [ - 'bucket', 'update', bucket_name, '--default-retention-mode', 'compliance', - '--default-retention-period', '7 days' + 'bucket', + 'update', + bucket_name, + '--default-retention-mode', + 'compliance', + '--default-retention-period', + '7 days', ] ) @@ -3207,16 +3495,28 @@ def test_enable_file_lock_and_set_retention_at_once(b2_tool, bucket_name): # attempt setting retention without file lock enabled b2_tool.should_fail( [ - 'bucket', 'update', bucket_name, '--default-retention-mode', 'compliance', - '--default-retention-period', '7 days' - ], r'ERROR: The bucket is not file lock enabled \(bucket_missing_file_lock\)' + 'bucket', + 'update', + bucket_name, + '--default-retention-mode', + 'compliance', + '--default-retention-period', + '7 days', + ], + r'ERROR: The bucket is not file lock enabled \(bucket_missing_file_lock\)', ) # enable file lock and set retention at once b2_tool.should_succeed( [ - 'bucket', 'update', bucket_name, '--default-retention-mode', 'compliance', - '--default-retention-period', '7 days', '--file-lock-enabled' + 'bucket', + 'update', + bucket_name, + '--default-retention-mode', + 'compliance', + '--default-retention-period', + '7 days', + '--file-lock-enabled', ] ) @@ -3229,10 +3529,9 @@ def _assert_file_lock_configuration( file_id, retention_mode: RetentionMode | None = None, retain_until: int | None = None, - legal_hold: LegalHold | None = None + legal_hold: LegalHold | None = None, ): - - file_version = b2_tool.should_succeed_json(['file', 'info', f"b2id://{file_id}"]) + file_version = b2_tool.should_succeed_json(['file', 'info', f'b2id://{file_id}']) if retention_mode is not None: if file_version['fileRetention']['mode'] == 'unknown': actual_file_retention = UNKNOWN_FILE_RETENTION_SETTING @@ -3279,7 +3578,7 @@ def test_upload_file__custom_upload_time(b2_tool, persistent_bucket, sample_file # file_id, action, date, time, size(, replication), name b2_tool.should_succeed( ['ls', '--long', '--replication', *b2_uri_args(bucket_name, subfolder)], - f'^4_z.* upload {cut_printable} +{len(file_data)} - {subfolder}/a' + f'^4_z.* upload {cut_printable} +{len(file_data)} - {subfolder}/a', ) @@ -3317,15 +3616,24 @@ def test_upload_unbound_stream__redirect_operator( def test_download_file_stdout( b2_tool, persistent_bucket, sample_filepath, tmp_path, uploaded_sample_file ): - assert b2_tool.should_succeed( - [ - 'file', 'download', '--quiet', - f"b2://{persistent_bucket.bucket_name}/{uploaded_sample_file['fileName']}", '-' - ], - ) == sample_filepath.read_text() - assert b2_tool.should_succeed( - ['file', 'download', '--quiet', f"b2id://{uploaded_sample_file['fileId']}", '-'], - ) == sample_filepath.read_text() + assert ( + b2_tool.should_succeed( + [ + 'file', + 'download', + '--quiet', + 
f"b2://{persistent_bucket.bucket_name}/{uploaded_sample_file['fileName']}", + '-', + ], + ) + == sample_filepath.read_text() + ) + assert ( + b2_tool.should_succeed( + ['file', 'download', '--quiet', f"b2id://{uploaded_sample_file['fileId']}", '-'], + ) + == sample_filepath.read_text() + ) def test_download_file_to_directory( @@ -3348,8 +3656,9 @@ def test_download_file_to_directory( ], ) downloaded_file = target_directory / filename_as_path - assert downloaded_file.read_text() == sample_file_content, \ - f'{downloaded_file}, {downloaded_file.read_text()}, {sample_file_content}' + assert ( + downloaded_file.read_text() == sample_file_content + ), f'{downloaded_file}, {downloaded_file.read_text()}, {sample_file_content}' b2_tool.should_succeed( [ @@ -3367,29 +3676,42 @@ def test_download_file_to_directory( if filepath.name != filename_as_path.name ] assert len(new_files) == 1, f'{new_files}' - assert new_files[0].read_text() == sample_file_content, \ - f'{new_files}, {new_files[0].read_text()}, {sample_file_content}' + assert ( + new_files[0].read_text() == sample_file_content + ), f'{new_files}, {new_files[0].read_text()}, {sample_file_content}' def test_cat(b2_tool, persistent_bucket, sample_filepath, tmp_path, uploaded_sample_file): - assert b2_tool.should_succeed( - ['file', 'cat', f"b2://{persistent_bucket.bucket_name}/{uploaded_sample_file['fileName']}"], - ) == sample_filepath.read_text() - assert b2_tool.should_succeed(['file', 'cat', f"b2id://{uploaded_sample_file['fileId']}"] - ) == sample_filepath.read_text() + assert ( + b2_tool.should_succeed( + [ + 'file', + 'cat', + f"b2://{persistent_bucket.bucket_name}/{uploaded_sample_file['fileName']}", + ], + ) + == sample_filepath.read_text() + ) + assert ( + b2_tool.should_succeed(['file', 'cat', f"b2id://{uploaded_sample_file['fileId']}"]) + == sample_filepath.read_text() + ) def test_header_arguments(b2_tool, persistent_bucket, sample_filepath, tmp_path): bucket_name = persistent_bucket.bucket_name - # yapf: disable args = [ - '--cache-control', 'max-age=3600', - '--content-disposition', 'attachment', - '--content-encoding', 'gzip', - '--content-language', 'en', - '--expires', 'Thu, 01 Dec 2050 16:00:00 GMT', + '--cache-control', + 'max-age=3600', + '--content-disposition', + 'attachment', + '--content-encoding', + 'gzip', + '--content-language', + 'en', + '--expires', + 'Thu, 01 Dec 2050 16:00:00 GMT', ] - # yapf: enable expected_file_info = { 'b2-cache-control': 'max-age=3600', 'b2-content-disposition': 'attachment', @@ -3453,72 +3775,80 @@ def test_notification_rules(b2_tool, bucket_name): if 'writeBucketNotifications' not in auth_dict['allowed']['capabilities']: pytest.skip('Test account does not have writeBucketNotifications capability') - assert b2_tool.should_succeed_json( - ["bucket", "notification-rule", "list", f"b2://{bucket_name}", "--json"] - ) == [] + assert ( + b2_tool.should_succeed_json( + ['bucket', 'notification-rule', 'list', f'b2://{bucket_name}', '--json'] + ) + == [] + ) notification_rule = { - "eventTypes": ["b2:ObjectCreated:*"], - "isEnabled": True, - "name": "test-rule", - "objectNamePrefix": "", - "targetConfiguration": - { - "customHeaders": None, - "hmacSha256SigningSecret": None, - "targetType": "webhook", - "url": "https://example.com/webhook", - } + 'eventTypes': ['b2:ObjectCreated:*'], + 'isEnabled': True, + 'name': 'test-rule', + 'objectNamePrefix': '', + 'targetConfiguration': { + 'customHeaders': None, + 'hmacSha256SigningSecret': None, + 'targetType': 'webhook', + 'url': 
'https://example.com/webhook', + }, } # add rule created_rule = b2_tool.should_succeed_json( [ - "bucket", - "notification-rule", - "create", - "--json", - f"b2://{bucket_name}", - "test-rule", - "--webhook-url", - "https://example.com/webhook", - "--event-type", - "b2:ObjectCreated:*", + 'bucket', + 'notification-rule', + 'create', + '--json', + f'b2://{bucket_name}', + 'test-rule', + '--webhook-url', + 'https://example.com/webhook', + '--event-type', + 'b2:ObjectCreated:*', ] ) - expected_rules = [{**notification_rule, "isSuspended": False, "suspensionReason": ""}] + expected_rules = [{**notification_rule, 'isSuspended': False, 'suspensionReason': ''}] assert_dict_equal_ignore_extra(created_rule, expected_rules[0]) # modify rule - secret = "0testSecret000000000000000000032" + secret = '0testSecret000000000000000000032' modified_rule = b2_tool.should_succeed_json( [ - "bucket", - "notification-rule", - "update", - "--json", - f"b2://{bucket_name}/prefix", - "test-rule", - "--disable", - "--sign-secret", + 'bucket', + 'notification-rule', + 'update', + '--json', + f'b2://{bucket_name}/prefix', + 'test-rule', + '--disable', + '--sign-secret', secret, ] ) - expected_rules[0].update({"objectNamePrefix": "prefix", "isEnabled": False}) - expected_rules[0]["targetConfiguration"]["hmacSha256SigningSecret"] = secret + expected_rules[0].update({'objectNamePrefix': 'prefix', 'isEnabled': False}) + expected_rules[0]['targetConfiguration']['hmacSha256SigningSecret'] = secret assert_dict_equal_ignore_extra(modified_rule, expected_rules[0]) # read updated rules assert_dict_equal_ignore_extra( b2_tool.should_succeed_json( - ["bucket", "notification-rule", "list", f"b2://{bucket_name}", "--json"] + ['bucket', 'notification-rule', 'list', f'b2://{bucket_name}', '--json'] ), expected_rules, ) # delete rule by name - assert b2_tool.should_succeed( - ["bucket", "notification-rule", "delete", f"b2://{bucket_name}", "test-rule"] - ) == f"Rule 'test-rule' has been deleted from b2://{bucket_name}/\n" - assert b2_tool.should_succeed_json( - ["bucket", "notification-rule", "list", f"b2://{bucket_name}", "--json"] - ) == [] + assert ( + b2_tool.should_succeed( + ['bucket', 'notification-rule', 'delete', f'b2://{bucket_name}', 'test-rule'] + ) + == f"Rule 'test-rule' has been deleted from b2://{bucket_name}/\n" + ) + assert ( + b2_tool.should_succeed_json( + ['bucket', 'notification-rule', 'list', f'b2://{bucket_name}', '--json'] + ) + == [] + ) diff --git a/test/integration/test_help.py b/test/integration/test_help.py index 5ad9cf538..96c289f7b 100644 --- a/test/integration/test_help.py +++ b/test/integration/test_help.py @@ -14,7 +14,7 @@ def test_help(cli_version): p = subprocess.run( - [cli_version, "--help"], + [cli_version, '--help'], check=True, capture_output=True, text=True, @@ -24,5 +24,5 @@ def test_help(cli_version): expected_name = cli_version if platform.system() == 'Windows': expected_name += '.exe' - assert re.match(r"^_?b2(v\d+)?(\.exe)?$", expected_name) # test sanity check - assert f"{expected_name} --help" in p.stdout + assert re.match(r'^_?b2(v\d+)?(\.exe)?$', expected_name) # test sanity check + assert f'{expected_name} --help' in p.stdout diff --git a/test/integration/test_tqdm_closer.py b/test/integration/test_tqdm_closer.py index 9786c1fab..4ac29e8dd 100644 --- a/test/integration/test_tqdm_closer.py +++ b/test/integration/test_tqdm_closer.py @@ -20,11 +20,13 @@ def test_tqdm_closer(b2_tool, bucket, file_name): # test that stderr doesn't contain any warning, in particular warnings about 
multiprocessing resource tracker # leaking semaphores - b2_tool.should_succeed([ - 'file', - 'cat', - f'b2://{bucket.name}/{file_name}', - ]) + b2_tool.should_succeed( + [ + 'file', + 'cat', + f'b2://{bucket.name}/{file_name}', + ] + ) # test that disabling _TqdmCloser does produce a resource tracker warning. Should the following check ever fail, # that would mean that either Tqdm or python fixed the issue and _TqdmCloser can be disabled for fixed versions diff --git a/test/static/test_licenses.py b/test/static/test_licenses.py index 670142e88..537ace8e6 100644 --- a/test/static/test_licenses.py +++ b/test/static/test_licenses.py @@ -13,7 +13,7 @@ import pytest -FIXER_CMD = "python test/static/test_licenses.py" +FIXER_CMD = 'python test/static/test_licenses.py' LICENSE_HEADER_TMPL = """\ ###################################################################### # @@ -56,10 +56,12 @@ def insert_header(file_path: str): with open(file_path, 'r+') as fd: content = fd.read() fd.seek(0) - fd.write(LICENSE_HEADER_TMPL.format( - path=file_path, - year=datetime.now().year, - )) + fd.write( + LICENSE_HEADER_TMPL.format( + path=file_path, + year=datetime.now().year, + ) + ) fd.write(content) diff --git a/test/unit/_cli/fixtures/dummy_command.py b/test/unit/_cli/fixtures/dummy_command.py index 82ec4b76f..eafdb1c7c 100755 --- a/test/unit/_cli/fixtures/dummy_command.py +++ b/test/unit/_cli/fixtures/dummy_command.py @@ -12,13 +12,13 @@ def main(): - parser = argparse.ArgumentParser(description="Dummy command") - parser.add_argument("--foo", help="foo help") - parser.add_argument("--bar", help="bar help") + parser = argparse.ArgumentParser(description='Dummy command') + parser.add_argument('--foo', help='foo help') + parser.add_argument('--bar', help='bar help') args = parser.parse_args() print(args.foo) print(args.bar) -if __name__ == "__main__": +if __name__ == '__main__': main() diff --git a/test/unit/_cli/test_autocomplete_cache.py b/test/unit/_cli/test_autocomplete_cache.py index 57c218018..35e031589 100644 --- a/test/unit/_cli/test_autocomplete_cache.py +++ b/test/unit/_cli/test_autocomplete_cache.py @@ -32,7 +32,7 @@ # We can't use pytest.mark.skipif to skip forked tests because with pytest-forked, # there is an attempt to fork even if the test is marked as skipped. 
# See https://github.com/pytest-dev/pytest-forked/issues/44 -if sys.platform == "win32": +if sys.platform == 'win32': forked = pytest.mark.skip(reason="Tests can't be run forked on windows") else: forked = pytest.mark.forked @@ -96,10 +96,7 @@ def argcomplete_result(): def cached_complete_result(cache: autocomplete_cache.AutocompleteCache, raise_exc: bool = True): exit, output = Exit(), io.StringIO() cache.autocomplete_from_cache( - uncached_args={ - 'exit_method': exit, - 'output_stream': output - }, raise_exc=raise_exc + uncached_args={'exit_method': exit, 'output_stream': output}, raise_exc=raise_exc ) return exit.code, output.getvalue() @@ -108,10 +105,7 @@ def uncached_complete_result(cache: autocomplete_cache.AutocompleteCache): exit, output = Exit(), io.StringIO() parser = b2._internal.console_tool.B2.create_parser() cache.cache_and_autocomplete( - parser, uncached_args={ - 'exit_method': exit, - 'output_stream': output - } + parser, uncached_args={'exit_method': exit, 'output_stream': output} ) return exit.code, output.getvalue() diff --git a/test/unit/_cli/test_autocomplete_install.py b/test/unit/_cli/test_autocomplete_install.py index 50b76cc50..03d47e13b 100644 --- a/test/unit/_cli/test_autocomplete_install.py +++ b/test/unit/_cli/test_autocomplete_install.py @@ -9,7 +9,6 @@ ###################################################################### import pathlib import shutil -from test.helpers import skip_on_windows import pytest @@ -17,23 +16,26 @@ SHELL_REGISTRY, add_or_update_shell_section, ) +from test.helpers import skip_on_windows -section = "test_section" -managed_by = "pytest" -content = "test content" +section = 'test_section' +managed_by = 'pytest' +content = 'test content' @pytest.fixture def test_file(tmp_path): - yield tmp_path / "test_file.sh" + yield tmp_path / 'test_file.sh' def test_add_or_update_shell_section_new_section(test_file): - test_file.write_text("# preexisting content\n\n") + test_file.write_text('# preexisting content\n\n') add_or_update_shell_section(test_file, section, managed_by, content) - assert test_file.read_text() == f"""# preexisting content + assert ( + test_file.read_text() + == f"""# preexisting content # >>> {section} >>> @@ -41,11 +43,12 @@ def test_add_or_update_shell_section_new_section(test_file): {content} # <<< {section} <<< """ + ) def test_add_or_update_shell_section_existing_section(test_file): - old_content = "old content" - new_content = "new content" + old_content = 'old content' + new_content = 'new content' # Write the initial file with an existing section test_file.write_text( @@ -61,38 +64,44 @@ def test_add_or_update_shell_section_existing_section(test_file): # Add the new content to the section add_or_update_shell_section(test_file, section, managed_by, new_content) - assert test_file.read_text() == f"""# preexisting content + assert ( + test_file.read_text() + == f"""# preexisting content # >>> {section} >>> # This section is managed by {managed_by} . Manual edit may break automated updates. {new_content} # <<< {section} <<< """ + ) def test_add_or_update_shell_section_no_file(test_file): # Add the new content to the section, which should create the file add_or_update_shell_section(test_file, section, managed_by, content) - assert test_file.read_text() == f""" + assert ( + test_file.read_text() + == f""" # >>> {section} >>> # This section is managed by {managed_by} . Manual edit may break automated updates. 
{content} # <<< {section} <<< """ + ) @pytest.fixture def dummy_command(homedir, monkeypatch, env): - name = "dummy_command" - bin_path = homedir / "bin" / name + name = 'dummy_command' + bin_path = homedir / 'bin' / name bin_path.parent.mkdir(parents=True, exist_ok=True) - bin_path.symlink_to(pathlib.Path(__file__).parent / "fixtures" / f"{name}.py") - monkeypatch.setenv("PATH", f"{homedir}/bin:{env['PATH']}") + bin_path.symlink_to(pathlib.Path(__file__).parent / 'fixtures' / f'{name}.py') + monkeypatch.setenv('PATH', f"{homedir}/bin:{env['PATH']}") yield name -@pytest.mark.parametrize("shell", ["bash", "zsh", "fish"]) +@pytest.mark.parametrize('shell', ['bash', 'zsh', 'fish']) @skip_on_windows def test_autocomplete_installer(homedir, env, shell, caplog, dummy_command): caplog.set_level(10) @@ -100,7 +109,7 @@ def test_autocomplete_installer(homedir, env, shell, caplog, dummy_command): shell_bin = shutil.which(shell) if shell_bin is None: - pytest.skip(f"{shell} is not installed") + pytest.skip(f'{shell} is not installed') assert shell_installer.is_enabled() is False shell_installer.install() diff --git a/test/unit/_cli/test_obj_dumps.py b/test/unit/_cli/test_obj_dumps.py index 932b4bb3a..4f6b15437 100644 --- a/test/unit/_cli/test_obj_dumps.py +++ b/test/unit/_cli/test_obj_dumps.py @@ -15,57 +15,57 @@ # Test cases as tuples: (input_data, expected_output) test_cases = [ - ({"key": "value"}, "key: value\n"), - ([{"a": 1, "b": 2}], "- a: 1\n b: 2\n"), - ([1, 2, "false"], "- 1\n- 2\n- 'false'\n"), - ({"true": True, "null": None}, "'null': null\n'true': true\n"), - ([1., 0.567], "- 1.0\n- 0.567\n"), + ({'key': 'value'}, 'key: value\n'), + ([{'a': 1, 'b': 2}], '- a: 1\n b: 2\n'), + ([1, 2, 'false'], "- 1\n- 2\n- 'false'\n"), + ({'true': True, 'null': None}, "'null': null\n'true': true\n"), + ([1.0, 0.567], '- 1.0\n- 0.567\n'), ([''], "- ''\n"), ( # make sure id and name are first, rest should be sorted alphabetically [ - {"b": 2, "a": 1, "name": 4, "id": 3}, + {'b': 2, 'a': 1, 'name': 4, 'id': 3}, ], - "- id: 3\n name: 4\n a: 1\n b: 2\n", + '- id: 3\n name: 4\n a: 1\n b: 2\n', ), ( # nested data [ { - "name": "John Doe", - "age": 30, - "addresses": [ + 'name': 'John Doe', + 'age': 30, + 'addresses': [ { - "street": "123 Elm St", - "city": "Somewhere", + 'street': '123 Elm St', + 'city': 'Somewhere', }, { - "street": "456 Oak St", + 'street': '456 Oak St', }, ], - "address": { - "street": "789 Pine St", - "city": "Anywhere", - "zip": "67890", + 'address': { + 'street': '789 Pine St', + 'city': 'Anywhere', + 'zip': '67890', }, } ], ( - "- name: John Doe\n" - " address: \n" - " city: Anywhere\n" - " street: 789 Pine St\n" + '- name: John Doe\n' + ' address: \n' + ' city: Anywhere\n' + ' street: 789 Pine St\n' " zip: '67890'\n" - " addresses: \n" - " - city: Somewhere\n" - " street: 123 Elm St\n" - " - street: 456 Oak St\n" - " age: 30\n" + ' addresses: \n' + ' - city: Somewhere\n' + ' street: 123 Elm St\n' + ' - street: 456 Oak St\n' + ' age: 30\n' ), ), ] -@pytest.mark.parametrize("input_data,expected", test_cases) +@pytest.mark.parametrize('input_data,expected', test_cases) def test_readable_yaml_dump(input_data, expected): output = StringIO() readable_yaml_dump(input_data, output) diff --git a/test/unit/_cli/test_obj_loads.py b/test/unit/_cli/test_obj_loads.py index 5f8992da1..f92101452 100644 --- a/test/unit/_cli/test_obj_loads.py +++ b/test/unit/_cli/test_obj_loads.py @@ -22,21 +22,12 @@ @pytest.mark.parametrize( - "input_, expected_val", + 'input_, expected_val', [ # json - ('{"a": 1}', { - 
"a": 1 - }), - ('{"a": 1, "b": 2}', { - "a": 1, - "b": 2 - }), - ('{"a": 1, "b": 2, "c": 3}', { - "a": 1, - "b": 2, - "c": 3 - }), + ('{"a": 1}', {'a': 1}), + ('{"a": 1, "b": 2}', {'a': 1, 'b': 2}), + ('{"a": 1, "b": 2, "c": 3}', {'a': 1, 'b': 2, 'c': 3}), ], ) def test_validated_loads(input_, expected_val): @@ -44,10 +35,10 @@ def test_validated_loads(input_, expected_val): @pytest.mark.parametrize( - "input_, error_msg", + 'input_, error_msg', [ # not valid json nor yaml - ("{", "'{' is not a valid JSON value"), + ('{', "'{' is not a valid JSON value"), ], ) def test_validated_loads__invalid_syntax(input_, error_msg): @@ -66,11 +57,11 @@ class MyTypedDict(TypedDict): def test_validated_loads__typed_dict(typed_dict_cls): input_ = '{"a": 1, "b": "2", "extra": null}' - expected_val = {"a": 1, "b": "2", "extra": None} + expected_val = {'a': 1, 'b': '2', 'extra': None} assert validated_loads(input_, typed_dict_cls) == expected_val -@pytest.mark.skipif(pydantic is None, reason="pydantic is not enabled") +@pytest.mark.skipif(pydantic is None, reason='pydantic is not enabled') def test_validated_loads__typed_dict_types_validation(typed_dict_cls): input_ = '{"a": "abc", "b": 2}' with pytest.raises(argparse.ArgumentTypeError): diff --git a/test/unit/_cli/test_pickle.py b/test/unit/_cli/test_pickle.py index 37e45722f..15c93ccbd 100644 --- a/test/unit/_cli/test_pickle.py +++ b/test/unit/_cli/test_pickle.py @@ -36,6 +36,7 @@ def test_unpickle(): prevents successful unpickling of objects that depend on loading modules from b2sdk.""" from .fixtures.module_loading_b2sdk import function + pickled = pickle.dumps(function) with pytest.raises(RuntimeError): unpickle(pickled) diff --git a/test/unit/_cli/test_shell.py b/test/unit/_cli/test_shell.py index 3a5199fff..00955c0fc 100644 --- a/test/unit/_cli/test_shell.py +++ b/test/unit/_cli/test_shell.py @@ -14,6 +14,6 @@ from b2._internal._cli import shell -@mock.patch.dict(os.environ, {"SHELL": "/bin/bash"}) +@mock.patch.dict(os.environ, {'SHELL': '/bin/bash'}) def test_detect_shell(): assert shell.detect_shell() == 'bash' diff --git a/test/unit/_cli/unpickle.py b/test/unit/_cli/unpickle.py index 078e69643..898b59fa8 100644 --- a/test/unit/_cli/unpickle.py +++ b/test/unit/_cli/unpickle.py @@ -34,7 +34,7 @@ def load(self): importlib.reload(sys.modules[module]) if any('b2sdk' in module for module in sys.modules): - raise RuntimeError("Loading the pickled object imported b2sdk module") + raise RuntimeError('Loading the pickled object imported b2sdk module') return result def find_class(self, module: str, name: str) -> Any: diff --git a/test/unit/_utils/test_uri.py b/test/unit/_utils/test_uri.py index 2099d208f..1c1e26999 100644 --- a/test/unit/_utils/test_uri.py +++ b/test/unit/_utils/test_uri.py @@ -16,58 +16,58 @@ class TestB2URI: def test__str__(self): - uri = B2URI(bucket_name="testbucket", path="path/to/file") - assert str(uri) == "b2://testbucket/path/to/file" + uri = B2URI(bucket_name='testbucket', path='path/to/file') + assert str(uri) == 'b2://testbucket/path/to/file' @pytest.mark.parametrize( - "path, expected", + 'path, expected', [ - ("", True), - ("path/", True), - ("path/subpath", None), + ('', True), + ('path/', True), + ('path/subpath', None), ], ) def test_is_dir(self, path, expected): - assert B2URI("bucket", path).is_dir() is expected + assert B2URI('bucket', path).is_dir() is expected def test__bucket_uris_are_normalized(self): alternatives = [ - B2URI("bucket"), - B2URI("bucket", ""), + B2URI('bucket'), + B2URI('bucket', ''), ] assert 
len(set(alternatives)) == 1 - assert {str(uri) for uri in alternatives} == {"b2://bucket/"} # normalized + assert {str(uri) for uri in alternatives} == {'b2://bucket/'} # normalized @pytest.mark.parametrize( - "path, expected_uri_str", + 'path, expected_uri_str', [ - ("", "b2://bucket/"), - ("path/", "b2://bucket/path/"), - ("path/subpath", "b2://bucket/path/subpath"), + ('', 'b2://bucket/'), + ('path/', 'b2://bucket/path/'), + ('path/subpath', 'b2://bucket/path/subpath'), ], ) def test__normalization(self, path, expected_uri_str): - assert str(B2URI("bucket", path)) == expected_uri_str - assert str(B2URI("bucket", path)) == str(B2URI("bucket", path)) # normalized + assert str(B2URI('bucket', path)) == expected_uri_str + assert str(B2URI('bucket', path)) == str(B2URI('bucket', path)) # normalized def test_b2fileuri_str(): - uri = B2FileIdURI(file_id="file123") - assert str(uri) == "b2id://file123" + uri = B2FileIdURI(file_id='file123') + assert str(uri) == 'b2id://file123' @pytest.mark.parametrize( - "uri,expected", + 'uri,expected', [ - ("some/local/path", Path("some/local/path")), - ("./some/local/path", Path("some/local/path")), - ("b2://bucket", B2URI(bucket_name="bucket")), - ("b2://bucket/", B2URI(bucket_name="bucket")), - ("b2://bucket/path/to/dir/", B2URI(bucket_name="bucket", path="path/to/dir/")), - ("b2id://file123", B2FileIdURI(file_id="file123")), - ("b2://bucket/wild[card]", B2URI(bucket_name="bucket", path="wild[card]")), - ("b2://bucket/wild?card", B2URI(bucket_name="bucket", path="wild?card")), - ("b2://bucket/special#char", B2URI(bucket_name="bucket", path="special#char")), + ('some/local/path', Path('some/local/path')), + ('./some/local/path', Path('some/local/path')), + ('b2://bucket', B2URI(bucket_name='bucket')), + ('b2://bucket/', B2URI(bucket_name='bucket')), + ('b2://bucket/path/to/dir/', B2URI(bucket_name='bucket', path='path/to/dir/')), + ('b2id://file123', B2FileIdURI(file_id='file123')), + ('b2://bucket/wild[card]', B2URI(bucket_name='bucket', path='wild[card]')), + ('b2://bucket/wild?card', B2URI(bucket_name='bucket', path='wild?card')), + ('b2://bucket/special#char', B2URI(bucket_name='bucket', path='special#char')), ], ) def test_parse_uri(uri, expected): @@ -75,32 +75,33 @@ def test_parse_uri(uri, expected): def test_parse_uri__allow_all_buckets(): - assert parse_uri("b2://", allow_all_buckets=True) == B2URI("") + assert parse_uri('b2://', allow_all_buckets=True) == B2URI('') with pytest.raises(ValueError) as exc_info: - parse_uri("b2:///", allow_all_buckets=True) - assert "Invalid B2 URI: all buckets URI doesn't allow non-empty path, but '/' was provided" == str( - exc_info.value + parse_uri('b2:///', allow_all_buckets=True) + assert ( + "Invalid B2 URI: all buckets URI doesn't allow non-empty path, but '/' was provided" + == str(exc_info.value) ) @pytest.mark.parametrize( - "uri, expected_exception_message", + 'uri, expected_exception_message', [ - ("", "URI cannot be empty"), + ('', 'URI cannot be empty'), # Test cases for invalid B2 URIs (missing netloc part) - ("b2://", "Invalid B2 URI: 'b2://'"), - ("b2id://", "Invalid B2 URI: 'b2id://'"), + ('b2://', "Invalid B2 URI: 'b2://'"), + ('b2id://', "Invalid B2 URI: 'b2id://'"), # Test cases for B2 URIs with credentials ( - "b2://user@password:bucket/path", - "Invalid B2 URI: credentials passed using `user@password:` syntax is not supported in URI", + 'b2://user@password:bucket/path', + 'Invalid B2 URI: credentials passed using `user@password:` syntax is not supported in URI', ), ( - 
"b2id://user@password:file123", - "Invalid B2 URI: credentials passed using `user@password:` syntax is not supported in URI", + 'b2id://user@password:file123', + 'Invalid B2 URI: credentials passed using `user@password:` syntax is not supported in URI', ), # Test cases for unsupported URI schemes - ("unknown://bucket/path", "Unsupported URI scheme: 'unknown'"), + ('unknown://bucket/path', "Unsupported URI scheme: 'unknown'"), ], ) def test_parse_uri_exceptions(uri, expected_exception_message): diff --git a/test/unit/conftest.py b/test/unit/conftest.py index bb7eb21bc..1a75c57e3 100644 --- a/test/unit/conftest.py +++ b/test/unit/conftest.py @@ -35,7 +35,7 @@ def pytest_addoption(parser): @pytest.hookimpl def pytest_report_header(config): int_version = get_int_version(config.getoption('--cli')) - return f"b2 apiver: {int_version}" + return f'b2 apiver: {int_version}' @pytest.fixture(scope='session') @@ -45,17 +45,17 @@ def cli_version(request) -> str: @pytest.fixture def homedir(tmp_path_factory): - yield tmp_path_factory.mktemp("test_homedir") + yield tmp_path_factory.mktemp('test_homedir') @pytest.fixture def env(homedir, monkeypatch): """Get ENV for running b2 command from shell level.""" - monkeypatch.setenv("HOME", str(homedir)) - monkeypatch.delenv("XDG_CONFIG_HOME", raising=False) - monkeypatch.setenv("SHELL", "/bin/bash") # fix for running under github actions - if "TERM" not in os.environ: - monkeypatch.setenv("TERM", "xterm") + monkeypatch.setenv('HOME', str(homedir)) + monkeypatch.delenv('XDG_CONFIG_HOME', raising=False) + monkeypatch.setenv('SHELL', '/bin/bash') # fix for running under github actions + if 'TERM' not in os.environ: + monkeypatch.setenv('TERM', 'xterm') yield os.environ @@ -99,7 +99,7 @@ def run(self, *args, **kwargs): return self._run_command(*args, **kwargs) -@pytest.fixture(scope="session", autouse=True) +@pytest.fixture(scope='session', autouse=True) def mock_signal(): with mock.patch('signal.signal'): yield @@ -126,8 +126,8 @@ def authorized_b2_cli(b2_cli): @pytest.fixture def bucket_info(b2_cli, authorized_b2_cli): - bucket_name = "my-bucket" - bucket_id = "bucket_0" + bucket_name = 'my-bucket' + bucket_id = 'bucket_0' b2_cli.run(['bucket', 'create', bucket_name, 'allPublic'], expected_stdout=f'{bucket_id}\n') return { 'bucketName': bucket_name, @@ -162,10 +162,10 @@ def local_file(tmp_path): @pytest.fixture def uploaded_file_with_control_chars(b2_cli, bucket_info, local_file): filename = '\u009bC\u009bC\u009bIfile.txt' - b2_cli.run(['file', 'upload', bucket_info["bucketName"], str(local_file), filename]) + b2_cli.run(['file', 'upload', bucket_info['bucketName'], str(local_file), filename]) return { - 'bucket': bucket_info["bucketName"], - 'bucketId': bucket_info["bucketId"], + 'bucket': bucket_info['bucketName'], + 'bucketId': bucket_info['bucketId'], 'fileName': filename, 'escapedFileName': '\\\\x9bC\\\\x9bC\\\\x9bIfile.txt', 'fileId': '1111', @@ -176,10 +176,10 @@ def uploaded_file_with_control_chars(b2_cli, bucket_info, local_file): @pytest.fixture def uploaded_file(b2_cli, bucket_info, local_file): filename = 'file1.txt' - b2_cli.run(['file', 'upload', '--quiet', bucket_info["bucketName"], str(local_file), filename]) + b2_cli.run(['file', 'upload', '--quiet', bucket_info['bucketName'], str(local_file), filename]) return { - 'bucket': bucket_info["bucketName"], - 'bucketId': bucket_info["bucketId"], + 'bucket': bucket_info['bucketName'], + 'bucketId': bucket_info['bucketId'], 'fileName': filename, 'fileId': '9999', 'content': local_file.read_text(), diff 
--git a/test/unit/console_tool/test_authorize_account.py b/test/unit/console_tool/test_authorize_account.py index 80d705847..0b7a59db5 100644 --- a/test/unit/console_tool/test_authorize_account.py +++ b/test/unit/console_tool/test_authorize_account.py @@ -27,13 +27,13 @@ def b2_cli_is_authorized_afterwards(b2_cli): def test_authorize_with_bad_key(b2_cli): - expected_stdout = "" + expected_stdout = '' expected_stderr = """ ERROR: unable to authorize account: Invalid authorization token. Server said: secret key is wrong (unauthorized) """ b2_cli._run_command( - ["account", "authorize", b2_cli.account_id, "bad-app-key"], + ['account', 'authorize', b2_cli.account_id, 'bad-app-key'], expected_stdout, expected_stderr, 1, @@ -42,19 +42,21 @@ def test_authorize_with_bad_key(b2_cli): @pytest.mark.parametrize( - "command", + 'command', [ - ["authorize-account"], - ["authorize_account"], - ["account", "authorize"], + ['authorize-account'], + ['authorize_account'], + ['account', 'authorize'], ], ) def test_authorize_with_good_key(b2_cli, b2_cli_is_authorized_afterwards, command): assert b2_cli.account_info.get_account_auth_token() is None - expected_stderr = "" if len( - command - ) == 2 else "WARNING: `authorize-account` command is deprecated. Use `account authorize` instead.\n" + expected_stderr = ( + '' + if len(command) == 2 + else 'WARNING: `authorize-account` command is deprecated. Use `account authorize` instead.\n' + ) b2_cli._run_command([*command, b2_cli.account_id, b2_cli.master_key], None, expected_stderr, 0) @@ -65,56 +67,57 @@ def test_authorize_using_env_variables(b2_cli): assert b2_cli.account_info.get_account_auth_token() is None with mock.patch.dict( - "os.environ", + 'os.environ', { B2_APPLICATION_KEY_ID_ENV_VAR: b2_cli.account_id, B2_APPLICATION_KEY_ENV_VAR: b2_cli.master_key, }, ): - b2_cli._run_command(["account", "authorize"], None, "", 0) + b2_cli._run_command(['account', 'authorize'], None, '', 0) # test deprecated command with mock.patch.dict( - "os.environ", + 'os.environ', { B2_APPLICATION_KEY_ID_ENV_VAR: b2_cli.account_id, B2_APPLICATION_KEY_ENV_VAR: b2_cli.master_key, }, ): b2_cli._run_command( - ["authorize-account"], None, + ['authorize-account'], + None, 'WARNING: `authorize-account` command is deprecated. 
Use `account authorize` instead.\n', - 0 + 0, ) assert b2_cli.account_info.get_account_auth_token() is not None @pytest.mark.parametrize( - "flags,realm_url", + 'flags,realm_url', [ - ([], "http://production.example.com"), - (["--debug-logs"], "http://production.example.com"), - (["--environment", "http://custom.example.com"], "http://custom.example.com"), - (["--environment", "production"], "http://production.example.com"), - (["--dev"], "http://api.backblazeb2.xyz:8180"), - (["--staging"], "https://api.backblaze.net"), + ([], 'http://production.example.com'), + (['--debug-logs'], 'http://production.example.com'), + (['--environment', 'http://custom.example.com'], 'http://custom.example.com'), + (['--environment', 'production'], 'http://production.example.com'), + (['--dev'], 'http://api.backblazeb2.xyz:8180'), + (['--staging'], 'https://api.backblaze.net'), ], ) def test_authorize_towards_realm( b2_cli, b2_cli_is_authorized_afterwards, flags, realm_url, cwd_path, b2_cli_log_fix ): - expected_stderr = f"Using {realm_url}\n" if any(f != "--debug-logs" for f in flags) else "" + expected_stderr = f'Using {realm_url}\n' if any(f != '--debug-logs' for f in flags) else '' b2_cli._run_command( - ["account", "authorize", *flags, b2_cli.account_id, b2_cli.master_key], + ['account', 'authorize', *flags, b2_cli.account_id, b2_cli.master_key], None, expected_stderr, 0, ) - log_path = cwd_path / "b2_cli.log" - if "--debug-logs" in flags: - assert f"Using {realm_url}\n" in log_path.read_text() + log_path = cwd_path / 'b2_cli.log' + if '--debug-logs' in flags: + assert f'Using {realm_url}\n' in log_path.read_text() else: assert not log_path.exists() @@ -125,13 +128,13 @@ def test_authorize_towards_custom_realm_using_env(b2_cli, b2_cli_is_authorized_a """ with mock.patch.dict( - "os.environ", + 'os.environ', { - B2_ENVIRONMENT_ENV_VAR: "http://custom2.example.com", + B2_ENVIRONMENT_ENV_VAR: 'http://custom2.example.com', }, ): b2_cli._run_command( - ["account", "authorize", b2_cli.account_id, b2_cli.master_key], + ['account', 'authorize', b2_cli.account_id, b2_cli.master_key], None, expected_stderr, 0, @@ -143,19 +146,18 @@ def test_authorize_account_prints_account_info(b2_cli): 'accountAuthToken': 'auth_token_0', 'accountFilePath': None, 'accountId': 'account-0', - 'allowed': - { - 'bucketId': None, - 'bucketName': None, - 'capabilities': sorted(ALL_CAPABILITIES), - 'namePrefix': None, - }, + 'allowed': { + 'bucketId': None, + 'bucketName': None, + 'capabilities': sorted(ALL_CAPABILITIES), + 'namePrefix': None, + }, 'apiUrl': 'http://api.example.com', 'applicationKey': 'masterKey-0', 'applicationKeyId': 'account-0', 'downloadUrl': 'http://download.example.com', 'isMasterKey': True, - 's3endpoint': 'http://s3.api.example.com' + 's3endpoint': 'http://s3.api.example.com', } b2_cli._run_command( diff --git a/test/unit/console_tool/test_download_file.py b/test/unit/console_tool/test_download_file.py index 5bb573cd5..457c692f8 100644 --- a/test/unit/console_tool/test_download_file.py +++ b/test/unit/console_tool/test_download_file.py @@ -9,11 +9,12 @@ ###################################################################### import os import pathlib -from test.helpers import skip_on_windows import pytest -EXPECTED_STDOUT_DOWNLOAD = ''' +from test.helpers import skip_on_windows + +EXPECTED_STDOUT_DOWNLOAD = """ File name: file1.txt File id: 9999 Output file path: {output_path} @@ -26,40 +27,41 @@ INFO src_last_modified_millis: 1500111222000 Checksum matches Download finished -''' +""" @pytest.mark.parametrize( - 
'flag,expected_stdout', [ + 'flag,expected_stdout', + [ ('--no-progress', EXPECTED_STDOUT_DOWNLOAD), ('-q', ''), ('--quiet', ''), - ] + ], ) def test_download_file_by_uri__flag_support(b2_cli, uploaded_file, tmp_path, flag, expected_stdout): output_path = tmp_path / 'output.txt' b2_cli.run( - ['file', 'download', flag, 'b2id://9999', - str(output_path)], - expected_stdout=expected_stdout.format(output_path=pathlib.Path(output_path).resolve()) + ['file', 'download', flag, 'b2id://9999', str(output_path)], + expected_stdout=expected_stdout.format(output_path=pathlib.Path(output_path).resolve()), ) assert output_path.read_text() == uploaded_file['content'] b2_cli.run( - ['download-file', flag, 'b2id://9999', - str(output_path)], - expected_stderr= - 'WARNING: `download-file` command is deprecated. Use `file download` instead.\n', - expected_stdout=expected_stdout.format(output_path=pathlib.Path(output_path).resolve()) + ['download-file', flag, 'b2id://9999', str(output_path)], + expected_stderr='WARNING: `download-file` command is deprecated. Use `file download` instead.\n', + expected_stdout=expected_stdout.format(output_path=pathlib.Path(output_path).resolve()), ) assert output_path.read_text() == uploaded_file['content'] -@pytest.mark.parametrize('b2_uri', [ - 'b2://my-bucket/file1.txt', - 'b2id://9999', -]) +@pytest.mark.parametrize( + 'b2_uri', + [ + 'b2://my-bucket/file1.txt', + 'b2id://9999', + ], +) def test_download_file_by_uri__b2_uri_support(b2_cli, uploaded_file, tmp_path, b2_uri): output_path = tmp_path / 'output.txt' @@ -67,41 +69,44 @@ def test_download_file_by_uri__b2_uri_support(b2_cli, uploaded_file, tmp_path, b ['file', 'download', b2_uri, str(output_path)], expected_stdout=EXPECTED_STDOUT_DOWNLOAD.format( output_path=pathlib.Path(output_path).resolve() - ) + ), ) assert output_path.read_text() == uploaded_file['content'] @pytest.mark.parametrize( - 'flag,expected_stdout', [ + 'flag,expected_stdout', + [ ('--no-progress', EXPECTED_STDOUT_DOWNLOAD), ('-q', ''), ('--quiet', ''), - ] + ], ) def test_download_file_by_name(b2_cli, local_file, uploaded_file, tmp_path, flag, expected_stdout): output_path = tmp_path / 'output.txt' b2_cli.run( [ - 'download-file-by-name', uploaded_file['bucket'], uploaded_file['fileName'], - str(output_path) + 'download-file-by-name', + uploaded_file['bucket'], + uploaded_file['fileName'], + str(output_path), ], expected_stdout=EXPECTED_STDOUT_DOWNLOAD.format( output_path=pathlib.Path(output_path).resolve() ), - expected_stderr= - 'WARNING: `download-file-by-name` command is deprecated. Use `file download` instead.\n', + expected_stderr='WARNING: `download-file-by-name` command is deprecated. Use `file download` instead.\n', ) assert output_path.read_text() == uploaded_file['content'] @pytest.mark.parametrize( - 'flag,expected_stdout', [ + 'flag,expected_stdout', + [ ('--no-progress', EXPECTED_STDOUT_DOWNLOAD), ('-q', ''), ('--quiet', ''), - ] + ], ) def test_download_file_by_id(b2_cli, uploaded_file, tmp_path, flag, expected_stdout): output_path = tmp_path / 'output.txt' @@ -109,8 +114,7 @@ def test_download_file_by_id(b2_cli, uploaded_file, tmp_path, flag, expected_std b2_cli.run( ['download-file-by-id', flag, '9999', str(output_path)], expected_stdout=expected_stdout.format(output_path=pathlib.Path(output_path).resolve()), - expected_stderr= - 'WARNING: `download-file-by-id` command is deprecated. Use `file download` instead.\n', + expected_stderr='WARNING: `download-file-by-id` command is deprecated. 
Use `file download` instead.\n', ) assert output_path.read_text() == uploaded_file['content'] @@ -132,15 +136,16 @@ def reader(): b2_cli.run( [ - 'download-file-by-name', '--no-progress', uploaded_file['bucket'], + 'download-file-by-name', + '--no-progress', + uploaded_file['bucket'], uploaded_file['fileName'], - str(output_path) + str(output_path), ], expected_stdout=EXPECTED_STDOUT_DOWNLOAD.format( output_path=pathlib.Path(output_path).resolve() ), - expected_stderr= - 'WARNING: `download-file-by-name` command is deprecated. Use `file download` instead.\n', + expected_stderr='WARNING: `download-file-by-name` command is deprecated. Use `file download` instead.\n', ) reader_future.result(timeout=1) assert output_string == uploaded_file['content'] @@ -163,8 +168,7 @@ def test_download_file_by_name__to_stdout_by_alias( """Test download-file-by-name stdout alias support""" b2_cli.run( ['download-file-by-name', '--no-progress', bucket, uploaded_stdout_txt['fileName'], '-'], - expected_stderr= - 'WARNING: `download-file-by-name` command is deprecated. Use `file download` instead.\n', + expected_stderr='WARNING: `download-file-by-name` command is deprecated. Use `file download` instead.\n', ) assert capfd.readouterr().out == uploaded_stdout_txt['content'] assert not pathlib.Path('-').exists() @@ -179,7 +183,7 @@ def test_cat__b2_uri(b2_cli, bucket, uploaded_stdout_txt, tmp_path, capfd): def test_cat__b2_uri__invalid(b2_cli, capfd): b2_cli.run( - ['file', 'cat', "nothing/meaningful"], + ['file', 'cat', 'nothing/meaningful'], expected_stderr=None, expected_status=2, ) @@ -188,21 +192,25 @@ def test_cat__b2_uri__invalid(b2_cli, capfd): def test_cat__b2_uri__not_a_file(b2_cli, bucket, capfd): b2_cli.run( - ['file', 'cat', "b2://bucket/dir/subdir/"], + ['file', 'cat', 'b2://bucket/dir/subdir/'], expected_stderr=None, expected_status=2, ) - assert "argument B2_URI: B2 URI pointing to a file-like object is required" in capfd.readouterr( - ).err + assert ( + 'argument B2_URI: B2 URI pointing to a file-like object is required' + in capfd.readouterr().err + ) def test_cat__b2id_uri(b2_cli, bucket, uploaded_stdout_txt, tmp_path, capfd): - b2_cli.run(['file', 'cat', '--no-progress', "b2id://9999"],) + b2_cli.run( + ['file', 'cat', '--no-progress', 'b2id://9999'], + ) assert capfd.readouterr().out == uploaded_stdout_txt['content'] b2_cli.run( - ['cat', '--no-progress', "b2id://9999"], - expected_stderr='WARNING: `cat` command is deprecated. Use `file cat` instead.\n' + ['cat', '--no-progress', 'b2id://9999'], + expected_stderr='WARNING: `cat` command is deprecated. Use `file cat` instead.\n', ) assert capfd.readouterr().out == uploaded_stdout_txt['content'] @@ -213,9 +221,13 @@ def test__download_file__threads(b2_cli, local_file, uploaded_file, tmp_path): b2_cli.run( [ - 'file', 'download', '--no-progress', '--threads', - str(num_threads), 'b2://my-bucket/file1.txt', - str(output_path) + 'file', + 'download', + '--no-progress', + '--threads', + str(num_threads), + 'b2://my-bucket/file1.txt', + str(output_path), ] ) diff --git a/test/unit/console_tool/test_file_hide.py b/test/unit/console_tool/test_file_hide.py index e876c58b9..f2f303552 100644 --- a/test/unit/console_tool/test_file_hide.py +++ b/test/unit/console_tool/test_file_hide.py @@ -15,8 +15,8 @@ @pytest.mark.apiver(to_ver=3) def test_legacy_hide_file(b2_cli, api_bucket, uploaded_file): b2_cli.run( - ["hide-file", uploaded_file["bucket"], uploaded_file["fileName"]], - expected_stderr='WARNING: `hide-file` command is deprecated. 
Use `file hide` instead.\n' + ['hide-file', uploaded_file['bucket'], uploaded_file['fileName']], + expected_stderr='WARNING: `hide-file` command is deprecated. Use `file hide` instead.\n', ) assert not list(api_bucket.ls()) @@ -24,7 +24,7 @@ def test_legacy_hide_file(b2_cli, api_bucket, uploaded_file): @pytest.mark.apiver(to_ver=4) def test_file_hide__by_bucket_and_file_name(b2_cli, api_bucket, uploaded_file): b2_cli.run( - ["file", "hide", uploaded_file["bucket"], uploaded_file["fileName"]], + ['file', 'hide', uploaded_file['bucket'], uploaded_file['fileName']], expected_stderr=( 'WARNING: "bucketName fileName" arguments syntax is deprecated, use "b2://bucketName/fileName" instead\n' ), @@ -34,11 +34,11 @@ def test_file_hide__by_bucket_and_file_name(b2_cli, api_bucket, uploaded_file): @pytest.mark.apiver def test_file_hide__by_b2_uri(b2_cli, api_bucket, uploaded_file): - b2_cli.run(["file", "hide", f"b2://{uploaded_file['bucket']}/{uploaded_file['fileName']}"]) + b2_cli.run(['file', 'hide', f"b2://{uploaded_file['bucket']}/{uploaded_file['fileName']}"]) assert not list(api_bucket.ls()) @pytest.mark.apiver def test_file_hide__cannot_hide_by_b2id(b2_cli, api_bucket, uploaded_file): - b2_cli.run(["file", "hide", f"b2id://{uploaded_file['fileId']}"], expected_status=2) + b2_cli.run(['file', 'hide', f"b2id://{uploaded_file['fileId']}"], expected_status=2) assert list(api_bucket.ls()) diff --git a/test/unit/console_tool/test_file_info.py b/test/unit/console_tool/test_file_info.py index 9da815d2b..2e1b6f2ae 100644 --- a/test/unit/console_tool/test_file_info.py +++ b/test/unit/console_tool/test_file_info.py @@ -13,18 +13,14 @@ @pytest.fixture def uploaded_download_version(b2_cli, bucket_info, uploaded_file): return { - "contentSha1": "2aae6c35c94fcfb415dbe95f408b9ce91ee846ed", - "contentType": "b2/x-auto", - "fileId": uploaded_file["fileId"], - "fileInfo": { - "src_last_modified_millis": "1500111222000" - }, - "fileName": "file1.txt", - "serverSideEncryption": { - "mode": "none" - }, - "size": 11, - "uploadTimestamp": 5000, + 'contentSha1': '2aae6c35c94fcfb415dbe95f408b9ce91ee846ed', + 'contentType': 'b2/x-auto', + 'fileId': uploaded_file['fileId'], + 'fileInfo': {'src_last_modified_millis': '1500111222000'}, + 'fileName': 'file1.txt', + 'serverSideEncryption': {'mode': 'none'}, + 'size': 11, + 'uploadTimestamp': 5000, } @@ -32,15 +28,15 @@ def uploaded_download_version(b2_cli, bucket_info, uploaded_file): def uploaded_file_version(b2_cli, bucket_info, uploaded_file, uploaded_download_version): return { **uploaded_download_version, - "accountId": b2_cli.account_id, - "action": "upload", - "bucketId": uploaded_file["bucketId"], + 'accountId': b2_cli.account_id, + 'action': 'upload', + 'bucketId': uploaded_file['bucketId'], } def test_get_file_info(b2_cli, uploaded_file_version): b2_cli.run( - ["get-file-info", uploaded_file_version["fileId"]], + ['get-file-info', uploaded_file_version['fileId']], expected_json_in_stdout=uploaded_file_version, expected_stderr='WARNING: `get-file-info` command is deprecated. 
Use `file info` instead.\n', ) @@ -49,8 +45,8 @@ def test_get_file_info(b2_cli, uploaded_file_version): def test_file_info__b2_uri(b2_cli, bucket, uploaded_download_version): b2_cli.run( [ - "file", - "info", + 'file', + 'info', f'b2://{bucket}/{uploaded_download_version["fileName"]}', ], expected_json_in_stdout=uploaded_download_version, @@ -59,6 +55,6 @@ def test_file_info__b2_uri(b2_cli, bucket, uploaded_download_version): def test_file_info__b2id_uri(b2_cli, uploaded_file_version): b2_cli.run( - ["file", "info", f'b2id://{uploaded_file_version["fileId"]}'], + ['file', 'info', f'b2id://{uploaded_file_version["fileId"]}'], expected_json_in_stdout=uploaded_file_version, ) diff --git a/test/unit/console_tool/test_file_server_side_copy.py b/test/unit/console_tool/test_file_server_side_copy.py index 1845a6a3a..f74409a66 100644 --- a/test/unit/console_tool/test_file_server_side_copy.py +++ b/test/unit/console_tool/test_file_server_side_copy.py @@ -15,53 +15,48 @@ @pytest.mark.apiver def test_copy_file_by_id(b2_cli, api_bucket, uploaded_file): expected_json = { - "accountId": b2_cli.account_id, - "action": "copy", - "bucketId": api_bucket.id_, - "size": 11, - "contentSha1": "2aae6c35c94fcfb415dbe95f408b9ce91ee846ed", - "contentType": "b2/x-auto", - "fileId": "9998", - "fileInfo": { - "src_last_modified_millis": "1500111222000" - }, - "fileName": "file1_copy.txt", - "serverSideEncryption": { - "mode": "none" - }, - "uploadTimestamp": 5001 + 'accountId': b2_cli.account_id, + 'action': 'copy', + 'bucketId': api_bucket.id_, + 'size': 11, + 'contentSha1': '2aae6c35c94fcfb415dbe95f408b9ce91ee846ed', + 'contentType': 'b2/x-auto', + 'fileId': '9998', + 'fileInfo': {'src_last_modified_millis': '1500111222000'}, + 'fileName': 'file1_copy.txt', + 'serverSideEncryption': {'mode': 'none'}, + 'uploadTimestamp': 5001, } b2_cli.run( ['file', 'copy-by-id', '9999', 'my-bucket', 'file1_copy.txt'], expected_json_in_stdout=expected_json, - expected_stderr= - 'WARNING: `copy-by-id` command is deprecated. Use `file server-side-copy` instead.\n', + expected_stderr='WARNING: `copy-by-id` command is deprecated. 
Use `file server-side-copy` instead.\n', ) @pytest.mark.apiver def test_file_server_side_copy__with_range(b2_cli, api_bucket, uploaded_file): expected_json = { - "accountId": b2_cli.account_id, - "action": "copy", - "bucketId": api_bucket.id_, - "size": 5, - "contentSha1": "4f664540ff30b8d34e037298a84e4736be39d731", - "contentType": "b2/x-auto", - "fileId": "9998", - "fileInfo": { - "src_last_modified_millis": "1500111222000" - }, - "fileName": "file1_copy.txt", - "serverSideEncryption": { - "mode": "none" - }, - "uploadTimestamp": 5001 + 'accountId': b2_cli.account_id, + 'action': 'copy', + 'bucketId': api_bucket.id_, + 'size': 5, + 'contentSha1': '4f664540ff30b8d34e037298a84e4736be39d731', + 'contentType': 'b2/x-auto', + 'fileId': '9998', + 'fileInfo': {'src_last_modified_millis': '1500111222000'}, + 'fileName': 'file1_copy.txt', + 'serverSideEncryption': {'mode': 'none'}, + 'uploadTimestamp': 5001, } b2_cli.run( [ - 'file', 'server-side-copy', '--range', '3,7', f'b2id://{uploaded_file["fileId"]}', - 'b2://my-bucket/file1_copy.txt' + 'file', + 'server-side-copy', + '--range', + '3,7', + f'b2id://{uploaded_file["fileId"]}', + 'b2://my-bucket/file1_copy.txt', ], expected_json_in_stdout=expected_json, ) @@ -81,7 +76,7 @@ def test_file_server_side_copy__invalid_metadata_copy_with_file_info( 'b2://my-bucket/file1_copy.txt', ], '', - expected_stderr="ERROR: File info can be set only when content type is set\n", + expected_stderr='ERROR: File info can be set only when content type is set\n', expected_status=1, ) @@ -100,27 +95,23 @@ def test_file_server_side_copy__invalid_metadata_replace_file_info( 'b2://my-bucket/file1_copy.txt', ], '', - expected_stderr="ERROR: File info can be not set only when content type is not set\n", + expected_stderr='ERROR: File info can be not set only when content type is not set\n', expected_status=1, ) # replace with content type and file info expected_json = { - "accountId": b2_cli.account_id, - "action": "copy", - "bucketId": api_bucket.id_, - "size": 11, - "contentSha1": "2aae6c35c94fcfb415dbe95f408b9ce91ee846ed", - "contentType": "text/plain", - "fileId": "9998", - "fileInfo": { - "a": "b" - }, - "fileName": "file1_copy.txt", - "serverSideEncryption": { - "mode": "none" - }, - "uploadTimestamp": 5001 + 'accountId': b2_cli.account_id, + 'action': 'copy', + 'bucketId': api_bucket.id_, + 'size': 11, + 'contentSha1': '2aae6c35c94fcfb415dbe95f408b9ce91ee846ed', + 'contentType': 'text/plain', + 'fileId': '9998', + 'fileInfo': {'a': 'b'}, + 'fileName': 'file1_copy.txt', + 'serverSideEncryption': {'mode': 'none'}, + 'uploadTimestamp': 5001, } b2_cli.run( [ @@ -139,11 +130,15 @@ def test_file_server_side_copy__invalid_metadata_replace_file_info( @pytest.mark.apiver def test_file_server_side_copy__unsatisfied_range(b2_cli, api_bucket, uploaded_file): - expected_stderr = "ERROR: The range in the request is outside the size of the file\n" + expected_stderr = 'ERROR: The range in the request is outside the size of the file\n' b2_cli.run( [ - 'file', 'server-side-copy', '--range', '12,20', 'b2id://9999', - 'b2://my-bucket/file1_copy.txt' + 'file', + 'server-side-copy', + '--range', + '12,20', + 'b2id://9999', + 'b2://my-bucket/file1_copy.txt', ], '', expected_stderr, @@ -153,21 +148,17 @@ def test_file_server_side_copy__unsatisfied_range(b2_cli, api_bucket, uploaded_f # Copy in different bucket b2_cli.run(['bucket', 'create', 'my-bucket1', 'allPublic'], 'bucket_1\n', '', 0) expected_json = { - "accountId": b2_cli.account_id, - "action": "copy", - "bucketId": "bucket_1", - 
"size": 11, - "contentSha1": "2aae6c35c94fcfb415dbe95f408b9ce91ee846ed", - "contentType": "b2/x-auto", - "fileId": "9997", - "fileInfo": { - "src_last_modified_millis": "1500111222000" - }, - "fileName": "file1_copy.txt", - "serverSideEncryption": { - "mode": "none" - }, - "uploadTimestamp": 5001 + 'accountId': b2_cli.account_id, + 'action': 'copy', + 'bucketId': 'bucket_1', + 'size': 11, + 'contentSha1': '2aae6c35c94fcfb415dbe95f408b9ce91ee846ed', + 'contentType': 'b2/x-auto', + 'fileId': '9997', + 'fileInfo': {'src_last_modified_millis': '1500111222000'}, + 'fileName': 'file1_copy.txt', + 'serverSideEncryption': {'mode': 'none'}, + 'uploadTimestamp': 5001, } b2_cli.run( ['file', 'server-side-copy', 'b2id://9999', 'b2://my-bucket1/file1_copy.txt'], @@ -178,26 +169,21 @@ def test_file_server_side_copy__unsatisfied_range(b2_cli, api_bucket, uploaded_f @pytest.mark.apiver def test_copy_file_by_id__deprecated(b2_cli, api_bucket, uploaded_file): expected_json = { - "accountId": b2_cli.account_id, - "action": "copy", - "bucketId": api_bucket.id_, - "size": 11, - "contentSha1": "2aae6c35c94fcfb415dbe95f408b9ce91ee846ed", - "contentType": "b2/x-auto", - "fileId": "9998", - "fileInfo": { - "src_last_modified_millis": "1500111222000" - }, - "fileName": "file1_copy_2.txt", - "serverSideEncryption": { - "mode": "none" - }, - "uploadTimestamp": 5001 + 'accountId': b2_cli.account_id, + 'action': 'copy', + 'bucketId': api_bucket.id_, + 'size': 11, + 'contentSha1': '2aae6c35c94fcfb415dbe95f408b9ce91ee846ed', + 'contentType': 'b2/x-auto', + 'fileId': '9998', + 'fileInfo': {'src_last_modified_millis': '1500111222000'}, + 'fileName': 'file1_copy_2.txt', + 'serverSideEncryption': {'mode': 'none'}, + 'uploadTimestamp': 5001, } b2_cli.run( ['copy-file-by-id', '9999', api_bucket.name, 'file1_copy_2.txt'], - expected_stderr= - 'WARNING: `copy-file-by-id` command is deprecated. Use `file server-side-copy` instead.\n', + expected_stderr='WARNING: `copy-file-by-id` command is deprecated. 
Use `file server-side-copy` instead.\n', expected_json_in_stdout=expected_json, ) @@ -206,9 +192,10 @@ def test_copy_file_by_id__deprecated(b2_cli, api_bucket, uploaded_file): def test_file_server_side_copy__by_b2_uri(b2_cli, api_bucket, uploaded_file): b2_cli.run( [ - "file", "server-side-copy", + 'file', + 'server-side-copy', f"b2://{uploaded_file['bucket']}/{uploaded_file['fileName']}", - f"b2://{uploaded_file['bucket']}/copy.bin" + f"b2://{uploaded_file['bucket']}/copy.bin", ], ) assert [fv.file_name for fv, _ in api_bucket.ls()] == ['copy.bin', uploaded_file['fileName']] @@ -218,8 +205,10 @@ def test_file_server_side_copy__by_b2_uri(b2_cli, api_bucket, uploaded_file): def test_file_hide__by_b2id_uri(b2_cli, api_bucket, uploaded_file): b2_cli.run( [ - "file", "server-side-copy", f"b2id://{uploaded_file['fileId']}", - f"b2://{uploaded_file['bucket']}/copy.bin" + 'file', + 'server-side-copy', + f"b2id://{uploaded_file['fileId']}", + f"b2://{uploaded_file['bucket']}/copy.bin", ], ) assert [fv.file_name for fv, _ in api_bucket.ls()] == ['copy.bin', uploaded_file['fileName']] diff --git a/test/unit/console_tool/test_get_url.py b/test/unit/console_tool/test_get_url.py index a21405851..94dd42bc5 100644 --- a/test/unit/console_tool/test_get_url.py +++ b/test/unit/console_tool/test_get_url.py @@ -12,7 +12,9 @@ @pytest.fixture def uploaded_file_url(bucket_info, uploaded_file): - return f"http://download.example.com/file/{bucket_info['bucketName']}/{uploaded_file['fileName']}" + return ( + f"http://download.example.com/file/{bucket_info['bucketName']}/{uploaded_file['fileName']}" + ) @pytest.fixture @@ -22,42 +24,41 @@ def uploaded_file_url_by_id(uploaded_file): def test_get_url(b2_cli, uploaded_file, uploaded_file_url_by_id): b2_cli.run( - ["get-url", f"b2id://{uploaded_file['fileId']}"], - expected_stdout=f"{uploaded_file_url_by_id}\n", + ['get-url', f"b2id://{uploaded_file['fileId']}"], + expected_stdout=f'{uploaded_file_url_by_id}\n', expected_stderr='WARNING: `get-url` command is deprecated. Use `file url` instead.\n', ) def test_make_url(b2_cli, uploaded_file, uploaded_file_url_by_id): b2_cli.run( - ["make-url", uploaded_file["fileId"]], - expected_stdout=f"{uploaded_file_url_by_id}\n", + ['make-url', uploaded_file['fileId']], + expected_stdout=f'{uploaded_file_url_by_id}\n', expected_stderr='WARNING: `make-url` command is deprecated. Use `file url` instead.\n', ) def test_make_friendly_url(b2_cli, bucket, uploaded_file, uploaded_file_url): b2_cli.run( - ["make-friendly-url", bucket, uploaded_file["fileName"]], - expected_stdout=f"{uploaded_file_url}\n", - expected_stderr= - 'WARNING: `make-friendly-url` command is deprecated. Use `file url` instead.\n', + ['make-friendly-url', bucket, uploaded_file['fileName']], + expected_stdout=f'{uploaded_file_url}\n', + expected_stderr='WARNING: `make-friendly-url` command is deprecated. 
Use `file url` instead.\n', ) def test_get_url__b2_uri(b2_cli, bucket, uploaded_file, uploaded_file_url): b2_cli.run( [ - "file", - "url", + 'file', + 'url', f'b2://{bucket}/{uploaded_file["fileName"]}', ], - expected_stdout=f"{uploaded_file_url}\n", + expected_stdout=f'{uploaded_file_url}\n', ) def test_get_url__b2id_uri(b2_cli, uploaded_file, uploaded_file_url_by_id): b2_cli.run( - ["file", "url", f'b2id://{uploaded_file["fileId"]}'], - expected_stdout=f"{uploaded_file_url_by_id}\n", + ['file', 'url', f'b2id://{uploaded_file["fileId"]}'], + expected_stdout=f'{uploaded_file_url_by_id}\n', ) diff --git a/test/unit/console_tool/test_help.py b/test/unit/console_tool/test_help.py index d908c1e3f..0d72c1b8c 100644 --- a/test/unit/console_tool/test_help.py +++ b/test/unit/console_tool/test_help.py @@ -11,18 +11,18 @@ @pytest.mark.parametrize( - "flag, included, excluded", + 'flag, included, excluded', [ # --help shouldn't show deprecated commands ( - "--help", - [" b2 file ", "-h", "--help-all"], - [" b2 download-file-by-name ", "(DEPRECATED)"], + '--help', + [' b2 file ', '-h', '--help-all'], + [' b2 download-file-by-name ', '(DEPRECATED)'], ), # --help-all should show deprecated commands, but marked as deprecated ( - "--help-all", - ["(DEPRECATED) b2 download-file-by-name ", "-h", "--help-all"], + '--help-all', + ['(DEPRECATED) b2 download-file-by-name ', '-h', '--help-all'], [], ), ], @@ -39,5 +39,5 @@ def test_help(b2_cli, flag, included, excluded, capsys): for e in excluded: if e in out: found.add(e) - assert found.issuperset(included), f"expected {included!r} in {out!r}" - assert found.isdisjoint(excluded), f"expected {excluded!r} not in {out!r}" + assert found.issuperset(included), f'expected {included!r} in {out!r}' + assert found.isdisjoint(excluded), f'expected {excluded!r} not in {out!r}' diff --git a/test/unit/console_tool/test_install_autocomplete.py b/test/unit/console_tool/test_install_autocomplete.py index bbf3fd6c3..263251b04 100644 --- a/test/unit/console_tool/test_install_autocomplete.py +++ b/test/unit/console_tool/test_install_autocomplete.py @@ -10,33 +10,34 @@ import contextlib import shutil -from test.helpers import skip_on_windows import pexpect import pytest +from test.helpers import skip_on_windows + @contextlib.contextmanager def pexpect_shell(shell_bin, env): - p = pexpect.spawn(f"{shell_bin} -i", env=env, maxread=1000) + p = pexpect.spawn(f'{shell_bin} -i', env=env, maxread=1000) p.setwinsize(100, 100) # required to see all suggestions in tests yield p p.close() -@pytest.mark.parametrize("shell", ["bash", "zsh", "fish"]) +@pytest.mark.parametrize('shell', ['bash', 'zsh', 'fish']) @skip_on_windows def test_install_autocomplete(b2_cli, env, shell, monkeypatch): shell_bin = shutil.which(shell) if shell_bin is None: - pytest.skip(f"{shell} is not installed") + pytest.skip(f'{shell} is not installed') - monkeypatch.setenv("SHELL", shell_bin) + monkeypatch.setenv('SHELL', shell_bin) b2_cli.run( - ["install-autocomplete"], - expected_part_of_stdout=f"Autocomplete successfully installed for {shell}", + ['install-autocomplete'], + expected_part_of_stdout=f'Autocomplete successfully installed for {shell}', ) with pexpect_shell(shell_bin, env=env) as pshell: - pshell.send("b2 \t\t") - pshell.expect_exact(["authorize-account", "download-file", "get-bucket"], timeout=30) + pshell.send('b2 \t\t') + pshell.expect_exact(['authorize-account', 'download-file', 'get-bucket'], timeout=30) diff --git a/test/unit/console_tool/test_ls.py b/test/unit/console_tool/test_ls.py index 
f6239d0c4..5b53b0444 100644 --- a/test/unit/console_tool/test_ls.py +++ b/test/unit/console_tool/test_ls.py @@ -13,47 +13,43 @@ def test_ls__without_bucket_name(b2_cli, bucket_info): - expected_output = "bucket_0 allPublic my-bucket\n" + expected_output = 'bucket_0 allPublic my-bucket\n' - b2_cli.run(["ls"], expected_stdout=expected_output) - b2_cli.run(["ls", "b2://"], expected_stdout=expected_output) + b2_cli.run(['ls'], expected_stdout=expected_output) + b2_cli.run(['ls', 'b2://'], expected_stdout=expected_output) def test_ls__without_bucket_name__json(b2_cli, bucket_info): expected_output = [ { - "accountId": "account-0", - "bucketId": "bucket_0", - "bucketInfo": {}, - "bucketName": "my-bucket", - "bucketType": "allPublic", - "corsRules": [], - "defaultRetention": { - "mode": None + 'accountId': 'account-0', + 'bucketId': 'bucket_0', + 'bucketInfo': {}, + 'bucketName': 'my-bucket', + 'bucketType': 'allPublic', + 'corsRules': [], + 'defaultRetention': {'mode': None}, + 'defaultServerSideEncryption': {'mode': 'none'}, + 'isFileLockEnabled': False, + 'lifecycleRules': [], + 'options': [], + 'replication': { + 'asReplicationDestination': None, + 'asReplicationSource': None, }, - "defaultServerSideEncryption": { - "mode": "none" - }, - "isFileLockEnabled": False, - "lifecycleRules": [], - "options": [], - "replication": { - "asReplicationDestination": None, - "asReplicationSource": None, - }, - "revision": 1, + 'revision': 1, } ] - b2_cli.run(["ls", "--json"], expected_json_in_stdout=expected_output) - b2_cli.run(["ls", "--json", "b2://"], expected_json_in_stdout=expected_output) + b2_cli.run(['ls', '--json'], expected_json_in_stdout=expected_output) + b2_cli.run(['ls', '--json', 'b2://'], expected_json_in_stdout=expected_output) -@pytest.mark.parametrize("flag", ["--long", "--recursive", "--replication"]) +@pytest.mark.parametrize('flag', ['--long', '--recursive', '--replication']) def test_ls__without_bucket_name__option_not_supported(b2_cli, bucket_info, flag): b2_cli.run( - ["ls", flag], - expected_stderr=f"ERROR: Cannot use {flag} option without specifying a bucket name\n", + ['ls', flag], + expected_stderr=f'ERROR: Cannot use {flag} option without specifying a bucket name\n', expected_status=1, ) @@ -61,9 +57,9 @@ def test_ls__without_bucket_name__option_not_supported(b2_cli, bucket_info, flag @pytest.mark.apiver(to_ver=3) def test_ls__pre_v4__should_not_return_exact_match_filename(b2_cli, uploaded_file): """`b2v3 ls bucketName folderName` should not return files named `folderName` even if such exist""" - b2_cli.run(["ls", uploaded_file['bucket']], expected_stdout='file1.txt\n') # sanity check + b2_cli.run(['ls', uploaded_file['bucket']], expected_stdout='file1.txt\n') # sanity check b2_cli.run( - ["ls", uploaded_file['bucket'], uploaded_file['fileName']], + ['ls', uploaded_file['bucket'], uploaded_file['fileName']], expected_stdout='', ) @@ -71,7 +67,7 @@ def test_ls__pre_v4__should_not_return_exact_match_filename(b2_cli, uploaded_fil @pytest.mark.apiver(from_ver=4) def test_ls__b2_uri__pointing_to_bucket(b2_cli, uploaded_file): b2_cli.run( - ["ls", f"b2://{uploaded_file['bucket']}/"], + ['ls', f"b2://{uploaded_file['bucket']}/"], expected_stdout='file1.txt\n', ) @@ -79,11 +75,11 @@ def test_ls__b2_uri__pointing_to_bucket(b2_cli, uploaded_file): @pytest.mark.apiver(from_ver=4) def test_ls__b2_uri__pointing_to_a_file(b2_cli, uploaded_file): b2_cli.run( - ["ls", f"b2://{uploaded_file['bucket']}/{uploaded_file['fileName']}"], + ['ls', 
f"b2://{uploaded_file['bucket']}/{uploaded_file['fileName']}"], expected_stdout='file1.txt\n', ) b2_cli.run( - ["ls", f"b2://{uploaded_file['bucket']}/nonExistingFile"], + ['ls', f"b2://{uploaded_file['bucket']}/nonExistingFile"], expected_stdout='', ) diff --git a/test/unit/console_tool/test_notification_rules.py b/test/unit/console_tool/test_notification_rules.py index 4fd236f8d..83d017107 100644 --- a/test/unit/console_tool/test_notification_rules.py +++ b/test/unit/console_tool/test_notification_rules.py @@ -15,29 +15,29 @@ @pytest.fixture() def bucket_notification_rule(b2_cli, bucket): rule = { - "eventTypes": ["b2:ObjectCreated:*"], - "isEnabled": True, - "isSuspended": False, - "name": "test-rule", - "objectNamePrefix": "", - "suspensionReason": "", - "targetConfiguration": { - "targetType": "webhook", - "url": "https://example.com/webhook", + 'eventTypes': ['b2:ObjectCreated:*'], + 'isEnabled': True, + 'isSuspended': False, + 'name': 'test-rule', + 'objectNamePrefix': '', + 'suspensionReason': '', + 'targetConfiguration': { + 'targetType': 'webhook', + 'url': 'https://example.com/webhook', }, } _, stdout, _ = b2_cli.run( [ - "bucket", - "notification-rule", - "create", - "--json", - f"b2://{bucket}", - "test-rule", - "--webhook-url", - "https://example.com/webhook", - "--event-type", - "b2:ObjectCreated:*", + 'bucket', + 'notification-rule', + 'create', + '--json', + f'b2://{bucket}', + 'test-rule', + '--webhook-url', + 'https://example.com/webhook', + '--event-type', + 'b2:ObjectCreated:*', ], ) actual_rule = json.loads(stdout) @@ -45,15 +45,18 @@ def bucket_notification_rule(b2_cli, bucket): return actual_rule -@pytest.mark.parametrize("command", [["bucket", "notification-rule"], ["notification-rules"]]) +@pytest.mark.parametrize('command', [['bucket', 'notification-rule'], ['notification-rules']]) def test_notification_rules__list_all(b2_cli, bucket, bucket_notification_rule, command): - _, stdout, _ = b2_cli.run([ - *command, - "list", - f"b2://{bucket}", - ]) + _, stdout, _ = b2_cli.run( + [ + *command, + 'list', + f'b2://{bucket}', + ] + ) assert ( - stdout == f"""\ + stdout + == f"""\ Notification rules for b2://{bucket}/ : - name: test-rule eventTypes: @@ -69,49 +72,51 @@ def test_notification_rules__list_all(b2_cli, bucket, bucket_notification_rule, ) -@pytest.mark.parametrize("command", [["bucket", "notification-rule"], ["notification-rules"]]) +@pytest.mark.parametrize('command', [['bucket', 'notification-rule'], ['notification-rules']]) def test_notification_rules__list_all_json(b2_cli, bucket, bucket_notification_rule, command): - _, stdout, _ = b2_cli.run([ - *command, - "list", - "--json", - f"b2://{bucket}", - ]) + _, stdout, _ = b2_cli.run( + [ + *command, + 'list', + '--json', + f'b2://{bucket}', + ] + ) assert json.loads(stdout) == [bucket_notification_rule] -@pytest.mark.parametrize("command", [["bucket", "notification-rule"], ["notification-rules"]]) +@pytest.mark.parametrize('command', [['bucket', 'notification-rule'], ['notification-rules']]) def test_notification_rules__update(b2_cli, bucket, bucket_notification_rule, command): - bucket_notification_rule["isEnabled"] = False + bucket_notification_rule['isEnabled'] = False _, stdout, _ = b2_cli.run( [ *command, - "update", - "--json", - f"b2://{bucket}", - bucket_notification_rule["name"], - "--disable", - "--custom-header", - "X-Custom-Header=value=1", + 'update', + '--json', + f'b2://{bucket}', + bucket_notification_rule['name'], + '--disable', + '--custom-header', + 'X-Custom-Header=value=1', ], ) - 
bucket_notification_rule["targetConfiguration"]["customHeaders"] = { - "X-Custom-Header": "value=1" + bucket_notification_rule['targetConfiguration']['customHeaders'] = { + 'X-Custom-Header': 'value=1' } assert json.loads(stdout) == bucket_notification_rule -@pytest.mark.parametrize("command", [["bucket", "notification-rule"], ["notification-rules"]]) +@pytest.mark.parametrize('command', [['bucket', 'notification-rule'], ['notification-rules']]) def test_notification_rules__update__no_such_rule( b2_cli, bucket, bucket_notification_rule, command ): b2_cli.run( [ *command, - "update", - f"b2://{bucket}", + 'update', + f'b2://{bucket}', f'{bucket_notification_rule["name"]}-unexisting', - "--disable", + '--disable', ], expected_stderr=( "ERROR: rule with name 'test-rule-unexisting' does not exist on bucket " @@ -121,25 +126,25 @@ def test_notification_rules__update__no_such_rule( ) -@pytest.mark.parametrize("command", [["bucket", "notification-rule"], ["notification-rules"]]) +@pytest.mark.parametrize('command', [['bucket', 'notification-rule'], ['notification-rules']]) def test_notification_rules__update__custom_header_malformed( b2_cli, bucket, bucket_notification_rule, command ): - bucket_notification_rule["isEnabled"] = False + bucket_notification_rule['isEnabled'] = False _, stdout, _ = b2_cli.run( [ *command, - "update", - "--json", - f"b2://{bucket}", - bucket_notification_rule["name"], - "--disable", - "--custom-header", - "X-Custom-Header: value", + 'update', + '--json', + f'b2://{bucket}', + bucket_notification_rule['name'], + '--disable', + '--custom-header', + 'X-Custom-Header: value', ], ) - bucket_notification_rule["targetConfiguration"]["customHeaders"] = { - "X-Custom-Header: value": "" + bucket_notification_rule['targetConfiguration']['customHeaders'] = { + 'X-Custom-Header: value': '' } assert json.loads(stdout) == bucket_notification_rule @@ -147,76 +152,76 @@ def test_notification_rules__update__custom_header_malformed( def test_notification_rules__delete(b2_cli, bucket, bucket_notification_rule): _, stdout, _ = b2_cli.run( [ - "bucket", - "notification-rule", - "delete", - f"b2://{bucket}", - bucket_notification_rule["name"], + 'bucket', + 'notification-rule', + 'delete', + f'b2://{bucket}', + bucket_notification_rule['name'], ], ) assert stdout == "Rule 'test-rule' has been deleted from b2://my-bucket/\n" -@pytest.mark.parametrize("command", [["bucket", "notification-rule"], ["notification-rules"]]) +@pytest.mark.parametrize('command', [['bucket', 'notification-rule'], ['notification-rules']]) def test_notification_rules__delete_no_such_rule(b2_cli, bucket, bucket_notification_rule, command): b2_cli.run( [ *command, - "delete", - f"b2://{bucket}", + 'delete', + f'b2://{bucket}', f'{bucket_notification_rule["name"]}-unexisting', ], expected_stderr=( "ERROR: no such rule to delete: 'test-rule-unexisting', available rules: ['test-rule'];" - " No rules have been deleted.\n" + ' No rules have been deleted.\n' ), expected_status=1, ) @pytest.mark.parametrize( - "args,expected_stdout", + 'args,expected_stdout', [ - (["-q"], ""), - ([], "No notification rules for b2://my-bucket/\n"), - (["--json"], "[]\n"), + (['-q'], ''), + ([], 'No notification rules for b2://my-bucket/\n'), + (['--json'], '[]\n'), ], ) def test_notification_rules__no_rules(b2_cli, bucket, args, expected_stdout): b2_cli.run( - ["bucket", "notification-rule", "list", f"b2://{bucket}", *args], + ['bucket', 'notification-rule', 'list', f'b2://{bucket}', *args], expected_stdout=expected_stdout, ) 
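[Editor's note] The rewrites in these test hunks (double quotes changed to single quotes, one argument per line with trailing commas, long implicit string concatenations collapsed onto one line) are formatter output rather than hand edits. A minimal sketch of the pyproject.toml settings that would produce this style is below; the option values shown are assumptions for illustration and are not taken from this patch.

    [tool.ruff]
    line-length = 100  # assumed wrap width, consistent with the longest reflowed lines in these hunks

    [tool.ruff.format]
    quote-style = "single"  # rewrite "double-quoted" strings as 'single-quoted'

With settings along these lines, running `ruff format .` reflows files into this style, and `ruff format --check .` can enforce it in CI without applying changes.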
-@pytest.mark.parametrize("command", [["bucket", "notification-rule"], ["notification-rules"]]) +@pytest.mark.parametrize('command', [['bucket', 'notification-rule'], ['notification-rules']]) def test_notification_rules__disable_enable(b2_cli, bucket, bucket_notification_rule, command): _, stdout, _ = b2_cli.run( [ *command, - "disable", - "--json", - f"b2://{bucket}", - bucket_notification_rule["name"], + 'disable', + '--json', + f'b2://{bucket}', + bucket_notification_rule['name'], ], ) - assert json.loads(stdout) == {**bucket_notification_rule, "isEnabled": False} + assert json.loads(stdout) == {**bucket_notification_rule, 'isEnabled': False} _, stdout, _ = b2_cli.run( [ *command, - "enable", - "--json", - f"b2://{bucket}", - bucket_notification_rule["name"], + 'enable', + '--json', + f'b2://{bucket}', + bucket_notification_rule['name'], ], ) - assert json.loads(stdout) == {**bucket_notification_rule, "isEnabled": True} + assert json.loads(stdout) == {**bucket_notification_rule, 'isEnabled': True} @pytest.mark.parametrize( - "subcommand", - ["disable", "enable"], + 'subcommand', + ['disable', 'enable'], ) def test_notification_rules__disable_enable__no_such_rule( b2_cli, @@ -226,10 +231,10 @@ def test_notification_rules__disable_enable__no_such_rule( ): b2_cli.run( [ - "bucket", - "notification-rule", + 'bucket', + 'notification-rule', subcommand, - f"b2://{bucket}", + f'b2://{bucket}', f'{bucket_notification_rule["name"]}-unexisting', ], expected_stderr=( @@ -240,17 +245,17 @@ def test_notification_rules__disable_enable__no_such_rule( ) -@pytest.mark.parametrize("command", [["bucket", "notification-rule"], ["notification-rules"]]) +@pytest.mark.parametrize('command', [['bucket', 'notification-rule'], ['notification-rules']]) def test_notification_rules__sign_secret(b2_cli, bucket, bucket_notification_rule, command): b2_cli.run( [ *command, - "update", - "--json", - f"b2://{bucket}", - bucket_notification_rule["name"], - "--sign-secret", - "new-secret", + 'update', + '--json', + f'b2://{bucket}', + bucket_notification_rule['name'], + '--sign-secret', + 'new-secret', ], expected_status=2, ) @@ -258,17 +263,19 @@ def test_notification_rules__sign_secret(b2_cli, bucket, bucket_notification_rul _, stdout, _ = b2_cli.run( [ *command, - "update", - "--json", - f"b2://{bucket}", - bucket_notification_rule["name"], - "--sign-secret", - "7" * 32, + 'update', + '--json', + f'b2://{bucket}', + bucket_notification_rule['name'], + '--sign-secret', + '7' * 32, ], ) - bucket_notification_rule["targetConfiguration"]["hmacSha256SigningSecret"] = "7" * 32 + bucket_notification_rule['targetConfiguration']['hmacSha256SigningSecret'] = '7' * 32 assert json.loads(stdout) == bucket_notification_rule - assert json.loads(b2_cli.run([*command, "list", "--json", f"b2://{bucket}"],)[1]) == [ - bucket_notification_rule - ] + assert json.loads( + b2_cli.run( + [*command, 'list', '--json', f'b2://{bucket}'], + )[1] + ) == [bucket_notification_rule] diff --git a/test/unit/console_tool/test_rm.py b/test/unit/console_tool/test_rm.py index 61ffe3f6d..a7b01c313 100644 --- a/test/unit/console_tool/test_rm.py +++ b/test/unit/console_tool/test_rm.py @@ -15,13 +15,13 @@ @pytest.mark.apiver(to_ver=3) def test_rm__pre_v4__should_not_rm_exact_match_filename(b2_cli, api_bucket, uploaded_file): """`b2v3 rm bucketName folderName` should not remove file named `folderName` even if such exist""" - b2_cli.run(["rm", uploaded_file['bucket'], uploaded_file['fileName']]) + b2_cli.run(['rm', uploaded_file['bucket'], 
uploaded_file['fileName']]) assert list(api_bucket.ls()) # nothing was removed @pytest.mark.apiver(from_ver=4) def test_rm__b2_uri__pointing_to_a_file(b2_cli, api_bucket, uploaded_file): - b2_cli.run(["rm", f"b2://{uploaded_file['bucket']}/noSuchFile"]) + b2_cli.run(['rm', f"b2://{uploaded_file['bucket']}/noSuchFile"]) assert list(api_bucket.ls()) # sanity check: bucket is not empty - b2_cli.run(["rm", f"b2://{uploaded_file['bucket']}/{uploaded_file['fileName']}"]) + b2_cli.run(['rm', f"b2://{uploaded_file['bucket']}/{uploaded_file['fileName']}"]) assert not list(api_bucket.ls()) diff --git a/test/unit/console_tool/test_upload_file.py b/test/unit/console_tool/test_upload_file.py index 44ad7750b..f8a687a9f 100644 --- a/test/unit/console_tool/test_upload_file.py +++ b/test/unit/console_tool/test_upload_file.py @@ -8,10 +8,11 @@ # ###################################################################### import os -from test.helpers import skip_on_windows import pytest +from test.helpers import skip_on_windows + def test_upload_file__file_info_src_last_modified_millis_and_headers(b2_cli, bucket, tmpdir): """Test `file upload` supports manually specifying file info src_last_modified_millis""" @@ -21,27 +22,38 @@ def test_upload_file__file_info_src_last_modified_millis_and_headers(b2_cli, buc local_file1.write(content) expected_json = { - "action": "upload", - "contentSha1": "2aae6c35c94fcfb415dbe95f408b9ce91ee846ed", - "fileInfo": - { - "b2-cache-control": "max-age=3600", - "b2-expires": "Thu, 01 Dec 2050 16:00:00 GMT", - "b2-content-language": "en", - "b2-content-disposition": "attachment", - "b2-content-encoding": "gzip", - "src_last_modified_millis": "1" - }, - "fileName": filename, - "size": len(content), + 'action': 'upload', + 'contentSha1': '2aae6c35c94fcfb415dbe95f408b9ce91ee846ed', + 'fileInfo': { + 'b2-cache-control': 'max-age=3600', + 'b2-expires': 'Thu, 01 Dec 2050 16:00:00 GMT', + 'b2-content-language': 'en', + 'b2-content-disposition': 'attachment', + 'b2-content-encoding': 'gzip', + 'src_last_modified_millis': '1', + }, + 'fileName': filename, + 'size': len(content), } b2_cli.run( [ - 'file', 'upload', '--no-progress', '--info=src_last_modified_millis=1', 'my-bucket', - '--cache-control', 'max-age=3600', '--expires', 'Thu, 01 Dec 2050 16:00:00 GMT', - '--content-language', 'en', '--content-disposition', 'attachment', '--content-encoding', + 'file', + 'upload', + '--no-progress', + '--info=src_last_modified_millis=1', + 'my-bucket', + '--cache-control', + 'max-age=3600', + '--expires', + 'Thu, 01 Dec 2050 16:00:00 GMT', + '--content-language', + 'en', + '--content-disposition', + 'attachment', + '--content-encoding', 'gzip', - str(local_file1), 'file1.txt' + str(local_file1), + 'file1.txt', ], expected_json_in_stdout=expected_json, remove_version=True, @@ -61,15 +73,14 @@ def test_upload_file__named_pipe(b2_cli, bucket, tmpdir, bg_executor): expected_stdout = f'URL by file name: http://download.example.com/file/my-bucket/{filename}' expected_json = { - "action": "upload", - "contentSha1": "2aae6c35c94fcfb415dbe95f408b9ce91ee846ed", - "contentType": "b2/x-auto", - "fileName": filename, - "size": len(content), + 'action': 'upload', + 'contentSha1': '2aae6c35c94fcfb415dbe95f408b9ce91ee846ed', + 'contentType': 'b2/x-auto', + 'fileName': filename, + 'size': len(content), } b2_cli.run( - ['file', 'upload', '--no-progress', 'my-bucket', - str(local_file1), filename], + ['file', 'upload', '--no-progress', 'my-bucket', str(local_file1), filename], expected_json_in_stdout=expected_json, 
remove_version=True, expected_part_of_stdout=expected_stdout, @@ -88,17 +99,17 @@ def test_upload_file__hyphen_file_instead_of_stdin(b2_cli, bucket, tmpdir, monke expected_stdout = f'URL by file name: http://download.example.com/file/my-bucket/{filename}' expected_json = { - "action": "upload", - "contentSha1": "ab467567b98216a255f77aef08aa2c418073d974", - "fileName": filename, - "size": len(content), + 'action': 'upload', + 'contentSha1': 'ab467567b98216a255f77aef08aa2c418073d974', + 'fileName': filename, + 'size': len(content), } b2_cli.run( ['upload-file', '--no-progress', 'my-bucket', '-', filename], expected_json_in_stdout=expected_json, remove_version=True, expected_part_of_stdout=expected_stdout, - expected_stderr="WARNING: `upload-file` command is deprecated. Use `file upload` instead.\n" + expected_stderr='WARNING: `upload-file` command is deprecated. Use `file upload` instead.\n' "WARNING: Filename `-` won't be supported in the future and will always be treated as stdin alias.\n", ) @@ -111,15 +122,15 @@ def test_upload_file__ignore_hyphen_file(b2_cli, bucket, tmpdir, monkeypatch, mo source_file = tmpdir.join('-') source_file.write(content) - content = "stdin input" + content = 'stdin input' filename = 'stdin.txt' expected_stdout = f'URL by file name: http://download.example.com/file/my-bucket/{filename}' expected_json = { - "action": "upload", - "contentSha1": "2ce72aa159d1f190fddf295cc883f20c4787a751", - "fileName": filename, - "size": len(content), + 'action': 'upload', + 'contentSha1': '2ce72aa159d1f190fddf295cc883f20c4787a751', + 'fileName': filename, + 'size': len(content), } mock_stdin.write(content) mock_stdin.close() @@ -134,15 +145,15 @@ def test_upload_file__ignore_hyphen_file(b2_cli, bucket, tmpdir, monkeypatch, mo def test_upload_file__stdin(b2_cli, bucket, tmpdir, mock_stdin): """Test `file upload` stdin alias support""" - content = "stdin input" + content = 'stdin input' filename = 'stdin.txt' expected_stdout = f'URL by file name: http://download.example.com/file/my-bucket/{filename}' expected_json = { - "action": "upload", - "contentSha1": "2ce72aa159d1f190fddf295cc883f20c4787a751", - "fileName": filename, - "size": len(content), + 'action': 'upload', + 'contentSha1': '2ce72aa159d1f190fddf295cc883f20c4787a751', + 'fileName': filename, + 'size': len(content), } mock_stdin.write(content) mock_stdin.close() @@ -157,15 +168,15 @@ def test_upload_file__stdin(b2_cli, bucket, tmpdir, mock_stdin): def test_upload_file_deprecated__stdin(b2_cli, bucket, tmpdir, mock_stdin): """Test `upload-file` stdin alias support""" - content = "stdin input deprecated" + content = 'stdin input deprecated' filename = 'stdin-deprecated.txt' expected_stdout = f'URL by file name: http://download.example.com/file/my-bucket/{filename}' expected_json = { - "action": "upload", - "contentSha1": "fcaa935e050efe0b5d7b26e65162b32b5e40aa81", - "fileName": filename, - "size": len(content), + 'action': 'upload', + 'contentSha1': 'fcaa935e050efe0b5d7b26e65162b32b5e40aa81', + 'fileName': filename, + 'size': len(content), } mock_stdin.write(content) mock_stdin.close() @@ -188,20 +199,23 @@ def test_upload_file__threads_setting(b2_cli, bucket, tmp_path): local_file1.write_text(content) expected_json = { - "action": "upload", - "contentSha1": "2aae6c35c94fcfb415dbe95f408b9ce91ee846ed", - "fileInfo": { - "src_last_modified_millis": f"{local_file1.stat().st_mtime_ns // 1000000}" - }, - "fileName": filename, - "size": len(content), + 'action': 'upload', + 'contentSha1': 
'2aae6c35c94fcfb415dbe95f408b9ce91ee846ed', + 'fileInfo': {'src_last_modified_millis': f'{local_file1.stat().st_mtime_ns // 1000000}'}, + 'fileName': filename, + 'size': len(content), } b2_cli.run( [ - 'file', 'upload', '--no-progress', 'my-bucket', '--threads', + 'file', + 'upload', + '--no-progress', + 'my-bucket', + '--threads', str(num_threads), - str(local_file1), 'file1.txt' + str(local_file1), + 'file1.txt', ], expected_json_in_stdout=expected_json, remove_version=True, diff --git a/test/unit/console_tool/test_upload_unbound_stream.py b/test/unit/console_tool/test_upload_unbound_stream.py index ebc6899c2..1bbbfd220 100644 --- a/test/unit/console_tool/test_upload_unbound_stream.py +++ b/test/unit/console_tool/test_upload_unbound_stream.py @@ -8,11 +8,14 @@ # ###################################################################### import os -from test.helpers import skip_on_windows from b2sdk.v2 import DEFAULT_MIN_PART_SIZE -UUS_DEPRECATION_WARNING = 'WARNING: `upload-unbound-stream` command is deprecated. Use `file upload` instead.\n' +from test.helpers import skip_on_windows + +UUS_DEPRECATION_WARNING = ( + 'WARNING: `upload-unbound-stream` command is deprecated. Use `file upload` instead.\n' +) @skip_on_windows @@ -26,14 +29,13 @@ def test_upload_unbound_stream__named_pipe(b2_cli, bucket, tmpdir, bg_executor): expected_stdout = f'URL by file name: http://download.example.com/file/my-bucket/{filename}' expected_json = { - "action": "upload", - "contentSha1": "2aae6c35c94fcfb415dbe95f408b9ce91ee846ed", - "fileName": filename, - "size": len(content), + 'action': 'upload', + 'contentSha1': '2aae6c35c94fcfb415dbe95f408b9ce91ee846ed', + 'fileName': filename, + 'size': len(content), } b2_cli.run( - ['upload-unbound-stream', '--no-progress', 'my-bucket', - str(fifo_file), filename], + ['upload-unbound-stream', '--no-progress', 'my-bucket', str(fifo_file), filename], expected_json_in_stdout=expected_json, remove_version=True, expected_part_of_stdout=expected_stdout, @@ -44,15 +46,15 @@ def test_upload_unbound_stream__named_pipe(b2_cli, bucket, tmpdir, bg_executor): def test_upload_unbound_stream__stdin(b2_cli, bucket, tmpdir, mock_stdin): """Test upload_unbound_stream stdin alias support""" - content = "stdin input" + content = 'stdin input' filename = 'stdin.txt' expected_stdout = f'URL by file name: http://download.example.com/file/my-bucket/{filename}' expected_json = { - "action": "upload", - "contentSha1": "2ce72aa159d1f190fddf295cc883f20c4787a751", - "fileName": filename, - "size": len(content), + 'action': 'upload', + 'contentSha1': '2ce72aa159d1f190fddf295cc883f20c4787a751', + 'fileName': filename, + 'size': len(content), } mock_stdin.write(content) mock_stdin.close() @@ -78,14 +80,14 @@ def test_upload_unbound_stream__with_part_size_options( fifo_file = tmpdir.join('fifo_file.txt') os.mkfifo(str(fifo_file)) writer = bg_executor.submit( - lambda: fifo_file.write("x" * expected_size) + lambda: fifo_file.write('x' * expected_size) ) # writer will block until content is read expected_stdout = f'URL by file name: http://download.example.com/file/my-bucket/{filename}' expected_json = { - "action": "upload", - "fileName": filename, - "size": expected_size, + 'action': 'upload', + 'fileName': filename, + 'size': expected_size, } b2_cli.run( @@ -110,25 +112,24 @@ def test_upload_unbound_stream__with_part_size_options( def test_upload_unbound_stream__regular_file(b2_cli, bucket, tmpdir): """Test upload_unbound_stream regular file support""" - content = "stdin input" + content = 'stdin input' 
filename = 'file.txt' filepath = tmpdir.join(filename) filepath.write(content) expected_stdout = f'URL by file name: http://download.example.com/file/my-bucket/{filename}' expected_json = { - "action": "upload", - "contentSha1": "2ce72aa159d1f190fddf295cc883f20c4787a751", - "fileName": filename, - "size": len(content), + 'action': 'upload', + 'contentSha1': '2ce72aa159d1f190fddf295cc883f20c4787a751', + 'fileName': filename, + 'size': len(content), } b2_cli.run( - ['upload-unbound-stream', '--no-progress', 'my-bucket', - str(filepath), filename], + ['upload-unbound-stream', '--no-progress', 'my-bucket', str(filepath), filename], expected_json_in_stdout=expected_json, remove_version=True, expected_part_of_stdout=expected_stdout, - expected_stderr=f"{UUS_DEPRECATION_WARNING}" - "WARNING: You are using a stream upload command to upload a regular file. While it will work, it is inefficient. Use of `file upload` command is recommended.\n", + expected_stderr=f'{UUS_DEPRECATION_WARNING}' + 'WARNING: You are using a stream upload command to upload a regular file. While it will work, it is inefficient. Use of `file upload` command is recommended.\n', ) diff --git a/test/unit/test_console_tool.py b/test/unit/test_console_tool.py index 108fba2ea..1dfa31d85 100644 --- a/test/unit/test_console_tool.py +++ b/test/unit/test_console_tool.py @@ -16,7 +16,6 @@ from io import StringIO from itertools import chain, product from tempfile import TemporaryDirectory -from test.helpers import skip_on_windows from typing import List, Optional from unittest import mock @@ -44,6 +43,7 @@ from b2._internal.b2v3.rm import Rm as v3Rm from b2._internal.b2v4.registry import Rm as v4Rm from b2._internal.version import VERSION +from test.helpers import skip_on_windows from .test_base import TestBase @@ -53,7 +53,7 @@ def file_mod_time_millis(path): class BaseConsoleToolTest(TestBase): - RE_API_VERSION = re.compile(r"\/v\d\/") + RE_API_VERSION = re.compile(r'\/v\d\/') json_pattern = re.compile(r'[^{,^\[]*(?P{.*})|(?P\[.*]).*', re.DOTALL) def setUp(self): @@ -106,7 +106,7 @@ def _run_command_ignore_output(self, argv): print('ACTUAL STDERR: ', repr(actual_stderr)) print(actual_stderr) - assert re.match(r'^(|Using https?://[\w.]+)$', actual_stderr), f"stderr: {actual_stderr!r}" + assert re.match(r'^(|Using https?://[\w.]+)$', actual_stderr), f'stderr: {actual_stderr!r}' self.assertEqual(0, actual_status, 'exit status code') def _trim_leading_spaces(self, s): @@ -300,15 +300,15 @@ def test_e_c1_char_ls_default_escape_control_chars_setting(self): self._run_command(['bucket', 'create', 'my-bucket-cc', 'allPrivate'], 'bucket_0\n', '', 0) with TempDir() as temp_dir: - local_file = self._make_local_file(temp_dir, "x") - bad_str = "\u009b2K\u009b7Gb\u009b24Gx\u009b4GH" - escaped_bad_str = "\\x9b2K\\x9b7Gb\\x9b24Gx\\x9b4GH" + local_file = self._make_local_file(temp_dir, 'x') + bad_str = '\u009b2K\u009b7Gb\u009b24Gx\u009b4GH' + escaped_bad_str = '\\x9b2K\\x9b7Gb\\x9b24Gx\\x9b4GH' self._run_command( ['file', 'upload', '--no-progress', 'my-bucket-cc', local_file, bad_str] ) self._run_command( - ['file', 'upload', '--no-progress', 'my-bucket-cc', local_file, "some_normal_text"] + ['file', 'upload', '--no-progress', 'my-bucket-cc', local_file, 'some_normal_text'] ) self._run_command( @@ -322,13 +322,22 @@ def test_camel_case_supported_in_v3(self): self._authorize_account() self._run_command( ['bucket', 'create', 'my-bucket', '--bucketInfo', '{"xxx": "123"}', 'allPrivate'], - 'bucket_0\n', '', 0 + 'bucket_0\n', + '', + 0, ) self._run_command( 
[ - 'bucket', 'create', 'my-bucket-kebab', '--bucket-info', '{"xxx": "123"}', - 'allPrivate' - ], 'bucket_1\n', '', 0 + 'bucket', + 'create', + 'my-bucket-kebab', + '--bucket-info', + '{"xxx": "123"}', + 'allPrivate', + ], + 'bucket_1\n', + '', + 0, ) @pytest.mark.apiver(from_ver=4) @@ -339,9 +348,16 @@ def test_camel_case_not_supported_in_v4(self): ) self._run_command( [ - 'bucket', 'create', 'my-bucket-kebab', '--bucket-info', '{"xxx": "123"}', - 'allPrivate' - ], 'bucket_0\n', '', 0 + 'bucket', + 'create', + 'my-bucket-kebab', + '--bucket-info', + '{"xxx": "123"}', + 'allPrivate', + ], + 'bucket_0\n', + '', + 0, ) def test_create_key_and_authorize_with_it(self): @@ -383,7 +399,7 @@ def test_create_key_and_authorize_with_it(self): self._run_command( ['authorize-account', 'appKeyId0', 'appKey0'], None, - "WARNING: `authorize-account` command is deprecated. Use `account authorize` instead.\n", + 'WARNING: `authorize-account` command is deprecated. Use `account authorize` instead.\n', 0, ) @@ -391,7 +407,7 @@ def test_create_key_and_authorize_with_it(self): self._run_command( ['authorize-account', 'appKeyId1', 'appKey1'], None, - "WARNING: `authorize-account` command is deprecated. Use `account authorize` instead.\n", + 'WARNING: `authorize-account` command is deprecated. Use `account authorize` instead.\n', 0, ) @@ -403,10 +419,11 @@ def test_create_key_with_authorization_from_env_vars(self): # Setting up environment variables with mock.patch.dict( - 'os.environ', { + 'os.environ', + { B2_APPLICATION_KEY_ID_ENV_VAR: self.account_id, B2_APPLICATION_KEY_ENV_VAR: self.master_key, - } + }, ): assert B2_APPLICATION_KEY_ID_ENV_VAR in os.environ assert B2_APPLICATION_KEY_ENV_VAR in os.environ @@ -428,10 +445,11 @@ def test_create_key_with_authorization_from_env_vars(self): ) with mock.patch.dict( - 'os.environ', { + 'os.environ', + { B2_APPLICATION_KEY_ID_ENV_VAR: 'appKeyId1', B2_APPLICATION_KEY_ENV_VAR: 'appKey1', - } + }, ): # "account authorize" is called when the key changes self._run_command( @@ -443,9 +461,10 @@ def test_create_key_with_authorization_from_env_vars(self): # "account authorize" is also called when the realm changes with mock.patch.dict( - 'os.environ', { + 'os.environ', + { B2_ENVIRONMENT_ENV_VAR: 'http://custom.example.com', - } + }, ): self._run_command( ['key', 'create', 'key1', 'listBuckets,listKeys'], @@ -472,16 +491,14 @@ def test_create_bucket__with_lifecycle_rule(self): self._authorize_account() rule = json.dumps( - { - "daysFromHidingToDeleting": 1, - "daysFromUploadingToHiding": None, - "fileNamePrefix": "" - } + {'daysFromHidingToDeleting': 1, 'daysFromUploadingToHiding': None, 'fileNamePrefix': ''} ) self._run_command( - ['bucket', 'create', 'my-bucket', 'allPrivate', '--lifecycle-rule', rule], 'bucket_0\n', - '', 0 + ['bucket', 'create', 'my-bucket', 'allPrivate', '--lifecycle-rule', rule], + 'bucket_0\n', + '', + 0, ) def test_create_bucket__with_lifecycle_rules(self): @@ -490,34 +507,41 @@ def test_create_bucket__with_lifecycle_rules(self): rules = json.dumps( [ { - "daysFromHidingToDeleting": 1, - "daysFromUploadingToHiding": None, - "fileNamePrefix": "" + 'daysFromHidingToDeleting': 1, + 'daysFromUploadingToHiding': None, + 'fileNamePrefix': '', } ] ) self._run_command( ['bucket', 'create', 'my-bucket', 'allPrivate', '--lifecycle-rules', rules], - 'bucket_0\n', '', 0 + 'bucket_0\n', + '', + 0, ) def test_create_bucket__mutually_exclusive_lifecycle_rules_options(self): self._authorize_account() rule = json.dumps( - { - "daysFromHidingToDeleting": 1, - 
"daysFromUploadingToHiding": None, - "fileNamePrefix": "" - } + {'daysFromHidingToDeleting': 1, 'daysFromUploadingToHiding': None, 'fileNamePrefix': ''} ) self._run_command( [ - 'bucket', 'create', 'my-bucket', 'allPrivate', '--lifecycle-rule', rule, - '--lifecycle-rules', f"[{rule}]" - ], '', '', 2 + 'bucket', + 'create', + 'my-bucket', + 'allPrivate', + '--lifecycle-rule', + rule, + '--lifecycle-rules', + f'[{rule}]', + ], + '', + '', + 2, ) def test_create_bucket_key_and_authorize_with_it(self): @@ -530,14 +554,17 @@ def test_create_bucket_key_and_authorize_with_it(self): # Create a key restricted to that bucket self._run_command( ['key', 'create', '--bucket', 'my-bucket', 'key1', 'listKeys,listBuckets'], - 'appKeyId0 appKey0\n', '', 0 + 'appKeyId0 appKey0\n', + '', + 0, ) # test deprecated command self._run_command( ['create-key', '--bucket', 'my-bucket', 'key2', 'listKeys,listBuckets'], 'appKeyId1 appKey1\n', - 'WARNING: `create-key` command is deprecated. Use `key create` instead.\n', 0 + 'WARNING: `create-key` command is deprecated. Use `key create` instead.\n', + 0, ) # Authorize with the key @@ -563,24 +590,25 @@ def test_update_bucket_without_lifecycle(self): # Create a bucket with lifecycleRule self._run_command( [ - 'bucket', 'create', '--lifecycle-rule', - '{"daysFromHidingToDeleting": 2, "fileNamePrefix": "foo"}', bucket_name, - 'allPrivate' - ], 'bucket_0\n', '', 0 + 'bucket', + 'create', + '--lifecycle-rule', + '{"daysFromHidingToDeleting": 2, "fileNamePrefix": "foo"}', + bucket_name, + 'allPrivate', + ], + 'bucket_0\n', + '', + 0, ) expected_stdout_dict = { - "accountId": self.account_id, - "bucketId": "bucket_0", - "bucketInfo": { - "xxx": "123" - }, - "bucketName": "my-bucket-liferules", - "bucketType": "allPrivate", - "lifecycleRules": [{ - "daysFromHidingToDeleting": 2, - "fileNamePrefix": "foo" - }], + 'accountId': self.account_id, + 'bucketId': 'bucket_0', + 'bucketInfo': {'xxx': '123'}, + 'bucketName': 'my-bucket-liferules', + 'bucketType': 'allPrivate', + 'lifecycleRules': [{'daysFromHidingToDeleting': 2, 'fileNamePrefix': 'foo'}], } # Update some other attribute than lifecycleRule, which should remain intact @@ -607,8 +635,10 @@ def test_deprecated_clear_account(self): # Clearing the account should remove the auth token # from the account info. self._run_command( - ['clear-account'], '', - 'WARNING: `clear-account` command is deprecated. Use `account clear` instead.\n', 0 + ['clear-account'], + '', + 'WARNING: `clear-account` command is deprecated. 
Use `account clear` instead.\n', + 0, ) assert self.account_info.get_account_auth_token() is None @@ -627,18 +657,16 @@ def test_buckets(self): # Update one of them expected_json = { - "accountId": self.account_id, - "bucketId": "bucket_0", - "bucketInfo": {}, - "bucketName": "my-bucket", - "bucketType": "allPublic", - "corsRules": [], - "defaultServerSideEncryption": { - "mode": "none" - }, - "lifecycleRules": [], - "options": [], - "revision": 2 + 'accountId': self.account_id, + 'bucketId': 'bucket_0', + 'bucketInfo': {}, + 'bucketName': 'my-bucket', + 'bucketType': 'allPublic', + 'corsRules': [], + 'defaultServerSideEncryption': {'mode': 'none'}, + 'lifecycleRules': [], + 'options': [], + 'revision': 2, } self._run_command( @@ -646,10 +674,10 @@ def test_buckets(self): ) # Make sure they are there - expected_stdout = ''' + expected_stdout = """ bucket_0 allPublic my-bucket bucket_1 allPrivate your-bucket - ''' + """ self._run_command(['bucket', 'list'], expected_stdout, '', 0) @@ -670,54 +698,59 @@ def test_deprecated_bucket_commands(self): # Make two buckets self._run_command( - ['create-bucket', 'my-bucket', 'allPrivate'], 'bucket_0\n', - 'WARNING: `create-bucket` command is deprecated. Use `bucket create` instead.\n', 0 + ['create-bucket', 'my-bucket', 'allPrivate'], + 'bucket_0\n', + 'WARNING: `create-bucket` command is deprecated. Use `bucket create` instead.\n', + 0, ) self._run_command( - ['create-bucket', 'your-bucket', 'allPrivate'], 'bucket_1\n', - 'WARNING: `create-bucket` command is deprecated. Use `bucket create` instead.\n', 0 + ['create-bucket', 'your-bucket', 'allPrivate'], + 'bucket_1\n', + 'WARNING: `create-bucket` command is deprecated. Use `bucket create` instead.\n', + 0, ) # Update one of them expected_json = { - "accountId": self.account_id, - "bucketId": "bucket_0", - "bucketInfo": {}, - "bucketName": "my-bucket", - "bucketType": "allPublic", - "corsRules": [], - "defaultServerSideEncryption": { - "mode": "none" - }, - "lifecycleRules": [], - "options": [], - "revision": 2 + 'accountId': self.account_id, + 'bucketId': 'bucket_0', + 'bucketInfo': {}, + 'bucketName': 'my-bucket', + 'bucketType': 'allPublic', + 'corsRules': [], + 'defaultServerSideEncryption': {'mode': 'none'}, + 'lifecycleRules': [], + 'options': [], + 'revision': 2, } self._run_command( ['update-bucket', 'my-bucket', 'allPublic'], - expected_stderr= - 'WARNING: `update-bucket` command is deprecated. Use `bucket update` instead.\n', - expected_json_in_stdout=expected_json + expected_stderr='WARNING: `update-bucket` command is deprecated. Use `bucket update` instead.\n', + expected_json_in_stdout=expected_json, ) # Make sure they are there - expected_stdout = ''' + expected_stdout = """ bucket_0 allPublic my-bucket bucket_1 allPrivate your-bucket - ''' + """ self._run_command( - ['list-buckets'], expected_stdout, - 'WARNING: `list-buckets` command is deprecated. Use `bucket list` instead.\n', 0 + ['list-buckets'], + expected_stdout, + 'WARNING: `list-buckets` command is deprecated. Use `bucket list` instead.\n', + 0, ) # Delete one expected_stdout = '' self._run_command( - ['delete-bucket', 'your-bucket'], expected_stdout, - 'WARNING: `delete-bucket` command is deprecated. Use `bucket delete` instead.\n', 0 + ['delete-bucket', 'your-bucket'], + expected_stdout, + 'WARNING: `delete-bucket` command is deprecated. 
Use `bucket delete` instead.\n', + 0, ) def test_encrypted_buckets(self): @@ -727,63 +760,65 @@ def test_encrypted_buckets(self): self._run_command(['bucket', 'create', 'my-bucket', 'allPrivate'], 'bucket_0\n', '', 0) self._run_command( [ - 'bucket', 'create', '--default-server-side-encryption=SSE-B2', 'your-bucket', - 'allPrivate' - ], 'bucket_1\n', '', 0 + 'bucket', + 'create', + '--default-server-side-encryption=SSE-B2', + 'your-bucket', + 'allPrivate', + ], + 'bucket_1\n', + '', + 0, ) # Update the one without encryption expected_json = { - "accountId": self.account_id, - "bucketId": "bucket_0", - "bucketInfo": {}, - "bucketName": "my-bucket", - "bucketType": "allPublic", - "corsRules": [], - "defaultServerSideEncryption": { - "algorithm": "AES256", - "mode": "SSE-B2" - }, - "lifecycleRules": [], - "options": [], - "revision": 2 + 'accountId': self.account_id, + 'bucketId': 'bucket_0', + 'bucketInfo': {}, + 'bucketName': 'my-bucket', + 'bucketType': 'allPublic', + 'corsRules': [], + 'defaultServerSideEncryption': {'algorithm': 'AES256', 'mode': 'SSE-B2'}, + 'lifecycleRules': [], + 'options': [], + 'revision': 2, } self._run_command( [ - 'bucket', 'update', '--default-server-side-encryption=SSE-B2', 'my-bucket', - 'allPublic' + 'bucket', + 'update', + '--default-server-side-encryption=SSE-B2', + 'my-bucket', + 'allPublic', ], expected_json_in_stdout=expected_json, ) # Update the one with encryption expected_json = { - "accountId": self.account_id, - "bucketId": "bucket_1", - "bucketInfo": {}, - "bucketName": "your-bucket", - "bucketType": "allPrivate", - "corsRules": [], - "defaultServerSideEncryption": { - "algorithm": "AES256", - "mode": "SSE-B2" - }, - "lifecycleRules": [], - "options": [], - "revision": 2 + 'accountId': self.account_id, + 'bucketId': 'bucket_1', + 'bucketInfo': {}, + 'bucketName': 'your-bucket', + 'bucketType': 'allPrivate', + 'corsRules': [], + 'defaultServerSideEncryption': {'algorithm': 'AES256', 'mode': 'SSE-B2'}, + 'lifecycleRules': [], + 'options': [], + 'revision': 2, } self._run_command( - ['bucket', 'update', 'your-bucket', 'allPrivate'], - expected_json_in_stdout=expected_json + ['bucket', 'update', 'your-bucket', 'allPrivate'], expected_json_in_stdout=expected_json ) # Make sure they are there - expected_stdout = ''' + expected_stdout = """ bucket_0 allPublic my-bucket bucket_1 allPrivate your-bucket - ''' + """ self._run_command(['bucket', 'list'], expected_stdout, '', 0) @@ -806,20 +841,28 @@ def test_keys(self): # Make a key with negative validDurationInSeconds expected_stderr = 'ERROR: Bad request: valid duration must be greater than 0, and less than 1000 days in seconds\n' self._run_command( - ['key', 'create', '--duration', '-456', 'goodKeyName', capabilities_with_commas], '', - expected_stderr, 1 + ['key', 'create', '--duration', '-456', 'goodKeyName', capabilities_with_commas], + '', + expected_stderr, + 1, ) # Make a key with validDurationInSeconds outside of range - expected_stderr = 'ERROR: Bad request: valid duration must be greater than 0, ' \ - 'and less than 1000 days in seconds\n' + expected_stderr = ( + 'ERROR: Bad request: valid duration must be greater than 0, ' + 'and less than 1000 days in seconds\n' + ) self._run_command( - ['key', 'create', '--duration', '0', 'goodKeyName', capabilities_with_commas], '', - expected_stderr, 1 + ['key', 'create', '--duration', '0', 'goodKeyName', capabilities_with_commas], + '', + expected_stderr, + 1, ) self._run_command( ['key', 'create', '--duration', '86400001', 'goodKeyName', 
capabilities_with_commas], - '', expected_stderr, 1 + '', + expected_stderr, + 1, ) # Create three keys @@ -831,8 +874,12 @@ def test_keys(self): ) self._run_command( [ - 'key', 'create', '--bucket', 'my-bucket-a', 'goodKeyName-Two', - capabilities_with_commas + ',readBucketEncryption' + 'key', + 'create', + '--bucket', + 'my-bucket-a', + 'goodKeyName-Two', + capabilities_with_commas + ',readBucketEncryption', ], 'appKeyId1 appKey1\n', '', @@ -840,8 +887,12 @@ def test_keys(self): ) self._run_command( [ - 'key', 'create', '--bucket', 'my-bucket-b', 'goodKeyName-Three', - capabilities_with_commas + 'key', + 'create', + '--bucket', + 'my-bucket-b', + 'goodKeyName-Three', + capabilities_with_commas, ], 'appKeyId2 appKey2\n', '', @@ -855,8 +906,12 @@ def test_keys(self): ) self._run_command( [ - 'key', 'create', '--bucket', 'my-bucket-b', 'goodKeyName-Five', - capabilities_with_commas + 'key', + 'create', + '--bucket', + 'my-bucket-b', + 'goodKeyName-Five', + capabilities_with_commas, ], 'appKeyId4 appKey4\n', '', @@ -874,8 +929,10 @@ def test_keys(self): # test deprecated command self._run_command( - ['delete-key', 'appKeyId5'], 'appKeyId5\n', - 'WARNING: `delete-key` command is deprecated. Use `key delete` instead.\n', 0 + ['delete-key', 'appKeyId5'], + 'appKeyId5\n', + 'WARNING: `delete-key` command is deprecated. Use `key delete` instead.\n', + 0, ) # Delete one bucket, to test listing when a bucket is gone. @@ -900,34 +957,38 @@ def test_keys(self): self._run_command(['key', 'list', '--long'], expected_list_keys_out_long, '', 0) self._run_command( - ['list-keys'], expected_list_keys_out, - 'WARNING: `list-keys` command is deprecated. Use `key list` instead.\n', 0 + ['list-keys'], + expected_list_keys_out, + 'WARNING: `list-keys` command is deprecated. Use `key list` instead.\n', + 0, ) self._run_command( - ['list-keys', '--long'], expected_list_keys_out_long, - 'WARNING: `list-keys` command is deprecated. Use `key list` instead.\n', 0 + ['list-keys', '--long'], + expected_list_keys_out_long, + 'WARNING: `list-keys` command is deprecated. 
Use `key list` instead.\n', + 0, ) # authorize and make calls using application key with no restrictions self._run_command(['account', 'authorize', 'appKeyId0', 'appKey0'], None, '', 0) self._run_command( ['bucket', 'list'], - 'bucket_0 allPublic my-bucket-a\nbucket_2 allPublic my-bucket-c\n', '', 0 + 'bucket_0 allPublic my-bucket-a\nbucket_2 allPublic my-bucket-c\n', + '', + 0, ) expected_json = { - "accountId": self.account_id, - "bucketId": "bucket_0", - "bucketInfo": {}, - "bucketName": "my-bucket-a", - "bucketType": "allPublic", - "corsRules": [], - "defaultServerSideEncryption": { - "mode": None - }, - "lifecycleRules": [], - "options": [], - "revision": 1 + 'accountId': self.account_id, + 'bucketId': 'bucket_0', + 'bucketInfo': {}, + 'bucketName': 'my-bucket-a', + 'bucketType': 'allPublic', + 'corsRules': [], + 'defaultServerSideEncryption': {'mode': None}, + 'lifecycleRules': [], + 'options': [], + 'revision': 1, } self._run_command(['bucket', 'get', 'my-bucket-a'], expected_json_in_stdout=expected_json) @@ -935,69 +996,71 @@ def test_keys(self): self._run_command(['account', 'authorize', 'appKeyId1', 'appKey1'], None, '', 0) self._run_command( - ['bucket', 'list'], '', 'ERROR: Application key is restricted to bucket: my-bucket-a\n', - 1 + ['bucket', 'list'], + '', + 'ERROR: Application key is restricted to bucket: my-bucket-a\n', + 1, ) self._run_command( - ['bucket', 'get', 'my-bucket-c'], '', - 'ERROR: Application key is restricted to bucket: my-bucket-a\n', 1 + ['bucket', 'get', 'my-bucket-c'], + '', + 'ERROR: Application key is restricted to bucket: my-bucket-a\n', + 1, ) expected_json = { - "accountId": self.account_id, - "bucketId": "bucket_0", - "bucketInfo": {}, - "bucketName": "my-bucket-a", - "bucketType": "allPublic", - "corsRules": [], - "defaultServerSideEncryption": { - "mode": "none" - }, - "lifecycleRules": [], - "options": [], - "revision": 1 + 'accountId': self.account_id, + 'bucketId': 'bucket_0', + 'bucketInfo': {}, + 'bucketName': 'my-bucket-a', + 'bucketType': 'allPublic', + 'corsRules': [], + 'defaultServerSideEncryption': {'mode': 'none'}, + 'lifecycleRules': [], + 'options': [], + 'revision': 1, } self._run_command(['bucket', 'get', 'my-bucket-a'], expected_json_in_stdout=expected_json) self._run_command( - ['ls', '--json', *self.b2_uri_args('my-bucket-c')], '', - 'ERROR: Application key is restricted to bucket: my-bucket-a\n', 1 + ['ls', '--json', *self.b2_uri_args('my-bucket-c')], + '', + 'ERROR: Application key is restricted to bucket: my-bucket-a\n', + 1, ) def test_bucket_info_from_json(self): - self._authorize_account() self._run_command(['bucket', 'create', 'my-bucket', 'allPublic'], 'bucket_0\n', '', 0) bucket_info = {'color': 'blue'} expected_json = { - "accountId": self.account_id, - "bucketId": "bucket_0", - "bucketInfo": { - "color": "blue" - }, - "bucketName": "my-bucket", - "bucketType": "allPrivate", - "corsRules": [], - "defaultServerSideEncryption": { - "mode": "none" - }, - "lifecycleRules": [], - "options": [], - "revision": 2 + 'accountId': self.account_id, + 'bucketId': 'bucket_0', + 'bucketInfo': {'color': 'blue'}, + 'bucketName': 'my-bucket', + 'bucketType': 'allPrivate', + 'corsRules': [], + 'defaultServerSideEncryption': {'mode': 'none'}, + 'lifecycleRules': [], + 'options': [], + 'revision': 2, } self._run_command( [ - 'bucket', 'update', '--bucket-info', - json.dumps(bucket_info), 'my-bucket', 'allPrivate' + 'bucket', + 'update', + '--bucket-info', + json.dumps(bucket_info), + 'my-bucket', + 'allPrivate', ], 
expected_json_in_stdout=expected_json, ) @pytest.mark.apiver(from_ver=4) def test_rm_fileid_v4(self): - self._authorize_account() self._run_command(['bucket', 'create', 'my-bucket', 'allPublic'], 'bucket_0\n', '', 0) @@ -1012,14 +1075,21 @@ def test_rm_fileid_v4(self): # Upload a file self._run_command( [ - 'file', 'upload', '--no-progress', 'my-bucket', local_file1, 'file1.txt', - '--cache-control=private, max-age=3600' + 'file', + 'upload', + '--no-progress', + 'my-bucket', + local_file1, + 'file1.txt', + '--cache-control=private, max-age=3600', ], remove_version=True, ) # Hide file - self._run_command(['file', 'hide', 'b2://my-bucket/file1.txt'],) + self._run_command( + ['file', 'hide', 'b2://my-bucket/file1.txt'], + ) # Delete one file version self._run_command(['rm', 'b2id://9998']) @@ -1041,31 +1111,33 @@ def test_hide_file_legacy_syntax(self): # Upload a file self._run_command( [ - 'file', 'upload', '--no-progress', 'my-bucket', local_file1, 'file1.txt', - '--cache-control=private, max-age=3600' + 'file', + 'upload', + '--no-progress', + 'my-bucket', + local_file1, + 'file1.txt', + '--cache-control=private, max-age=3600', ], remove_version=True, ) # Get file info expected_json = { - "accountId": self.account_id, - "action": "upload", - "bucketId": "bucket_0", - "size": 11, - "contentSha1": "2aae6c35c94fcfb415dbe95f408b9ce91ee846ed", - "contentType": "b2/x-auto", - "fileId": "9999", - "fileInfo": - { - "src_last_modified_millis": "1500111222000", - "b2-cache-control": "private, max-age=3600" - }, - "fileName": "file1.txt", - "serverSideEncryption": { - "mode": "none" + 'accountId': self.account_id, + 'action': 'upload', + 'bucketId': 'bucket_0', + 'size': 11, + 'contentSha1': '2aae6c35c94fcfb415dbe95f408b9ce91ee846ed', + 'contentType': 'b2/x-auto', + 'fileId': '9999', + 'fileInfo': { + 'src_last_modified_millis': '1500111222000', + 'b2-cache-control': 'private, max-age=3600', }, - "uploadTimestamp": 5000 + 'fileName': 'file1.txt', + 'serverSideEncryption': {'mode': 'none'}, + 'uploadTimestamp': 5000, } self._run_command( @@ -1075,16 +1147,14 @@ def test_hide_file_legacy_syntax(self): # Hide the file expected_json = { - "action": "hide", - "contentSha1": "none", - "fileId": "9998", - "fileInfo": {}, - "fileName": "file1.txt", - "serverSideEncryption": { - "mode": "none" - }, - "size": 0, - "uploadTimestamp": 5001 + 'action': 'hide', + 'contentSha1': 'none', + 'fileId': '9998', + 'fileInfo': {}, + 'fileName': 'file1.txt', + 'serverSideEncryption': {'mode': 'none'}, + 'size': 0, + 'uploadTimestamp': 5001, } self._run_command( @@ -1093,7 +1163,6 @@ def test_hide_file_legacy_syntax(self): ) def test_files(self): - self._authorize_account() self._run_command(['bucket', 'create', 'my-bucket', 'allPublic'], 'bucket_0\n', '', 0) @@ -1106,31 +1175,33 @@ def test_files(self): self.assertEqual(1500111222, os.path.getmtime(local_file1)) # Upload a file - expected_stdout = ''' + expected_stdout = """ URL by file name: http://download.example.com/file/my-bucket/file1.txt - URL by fileId: http://download.example.com/b2api/vx/b2_download_file_by_id?fileId=9999''' + URL by fileId: http://download.example.com/b2api/vx/b2_download_file_by_id?fileId=9999""" expected_json = { - "action": "upload", - "contentSha1": "2aae6c35c94fcfb415dbe95f408b9ce91ee846ed", - "contentType": "b2/x-auto", - "fileId": "9999", - "fileInfo": - { - "src_last_modified_millis": "1500111222000", - "b2-cache-control": "private, max-age=3600" - }, - "fileName": "file1.txt", - "serverSideEncryption": { - "mode": "none" + 'action': 
'upload', + 'contentSha1': '2aae6c35c94fcfb415dbe95f408b9ce91ee846ed', + 'contentType': 'b2/x-auto', + 'fileId': '9999', + 'fileInfo': { + 'src_last_modified_millis': '1500111222000', + 'b2-cache-control': 'private, max-age=3600', }, - "size": 11, - "uploadTimestamp": 5000 + 'fileName': 'file1.txt', + 'serverSideEncryption': {'mode': 'none'}, + 'size': 11, + 'uploadTimestamp': 5000, } self._run_command( [ - 'file', 'upload', '--no-progress', 'my-bucket', local_file1, 'file1.txt', - '--cache-control=private, max-age=3600' + 'file', + 'upload', + '--no-progress', + 'my-bucket', + local_file1, + 'file1.txt', + '--cache-control=private, max-age=3600', ], expected_json_in_stdout=expected_json, remove_version=True, @@ -1140,23 +1211,20 @@ def test_files(self): # Get file info mod_time_str = str(file_mod_time_millis(local_file1)) expected_json = { - "accountId": self.account_id, - "action": "upload", - "bucketId": "bucket_0", - "size": 11, - "contentSha1": "2aae6c35c94fcfb415dbe95f408b9ce91ee846ed", - "contentType": "b2/x-auto", - "fileId": "9999", - "fileInfo": - { - "src_last_modified_millis": "1500111222000", - "b2-cache-control": "private, max-age=3600" - }, - "fileName": "file1.txt", - "serverSideEncryption": { - "mode": "none" + 'accountId': self.account_id, + 'action': 'upload', + 'bucketId': 'bucket_0', + 'size': 11, + 'contentSha1': '2aae6c35c94fcfb415dbe95f408b9ce91ee846ed', + 'contentType': 'b2/x-auto', + 'fileId': '9999', + 'fileInfo': { + 'src_last_modified_millis': '1500111222000', + 'b2-cache-control': 'private, max-age=3600', }, - "uploadTimestamp": 5000 + 'fileName': 'file1.txt', + 'serverSideEncryption': {'mode': 'none'}, + 'uploadTimestamp': 5000, } self._run_command( @@ -1166,16 +1234,14 @@ def test_files(self): # Hide the file expected_json = { - "action": "hide", - "contentSha1": "none", - "fileId": "9998", - "fileInfo": {}, - "fileName": "file1.txt", - "serverSideEncryption": { - "mode": "none" - }, - "size": 0, - "uploadTimestamp": 5001 + 'action': 'hide', + 'contentSha1': 'none', + 'fileId': '9998', + 'fileInfo': {}, + 'fileName': 'file1.txt', + 'serverSideEncryption': {'mode': 'none'}, + 'size': 0, + 'uploadTimestamp': 5001, } self._run_command( @@ -1186,33 +1252,29 @@ def test_files(self): # List the file versions expected_json = [ { - "action": "hide", - "contentSha1": "none", - "fileId": "9998", - "fileInfo": {}, - "fileName": "file1.txt", - "serverSideEncryption": { - "mode": "none" - }, - "size": 0, - "uploadTimestamp": 5001 - }, { - "action": "upload", - "contentSha1": "2aae6c35c94fcfb415dbe95f408b9ce91ee846ed", - "contentType": "b2/x-auto", - "fileId": "9999", - "fileInfo": - { - "src_last_modified_millis": str(mod_time_str), - "b2-cache-control": "private, max-age=3600" - }, - "fileName": "file1.txt", - "serverSideEncryption": { - "mode": "none" + 'action': 'hide', + 'contentSha1': 'none', + 'fileId': '9998', + 'fileInfo': {}, + 'fileName': 'file1.txt', + 'serverSideEncryption': {'mode': 'none'}, + 'size': 0, + 'uploadTimestamp': 5001, + }, + { + 'action': 'upload', + 'contentSha1': '2aae6c35c94fcfb415dbe95f408b9ce91ee846ed', + 'contentType': 'b2/x-auto', + 'fileId': '9999', + 'fileInfo': { + 'src_last_modified_millis': str(mod_time_str), + 'b2-cache-control': 'private, max-age=3600', }, - "size": 11, - "uploadTimestamp": 5000 - } + 'fileName': 'file1.txt', + 'serverSideEncryption': {'mode': 'none'}, + 'size': 11, + 'uploadTimestamp': 5000, + }, ] self._run_command( @@ -1221,36 +1283,33 @@ def test_files(self): ) # List the file names - expected_stdout = ''' + 
expected_stdout = """ [] - ''' + """ self._run_command( ['ls', '--json', *self.b2_uri_args('my-bucket')], expected_stdout, '', 0 ) # Delete one file version, passing the name in - expected_json = {"action": "delete", "fileId": "9998", "fileName": "file1.txt"} + expected_json = {'action': 'delete', 'fileId': '9998', 'fileName': 'file1.txt'} self._run_command( ['delete-file-version', 'file1.txt', '9998'], - expected_stderr= - 'WARNING: `delete-file-version` command is deprecated. Use `rm` instead.\n', - expected_json_in_stdout=expected_json + expected_stderr='WARNING: `delete-file-version` command is deprecated. Use `rm` instead.\n', + expected_json_in_stdout=expected_json, ) # Delete one file version, not passing the name in - expected_json = {"action": "delete", "fileId": "9999", "fileName": "file1.txt"} + expected_json = {'action': 'delete', 'fileId': '9999', 'fileName': 'file1.txt'} self._run_command( ['delete-file-version', '9999'], - expected_stderr= - 'WARNING: `delete-file-version` command is deprecated. Use `rm` instead.\n', - expected_json_in_stdout=expected_json + expected_stderr='WARNING: `delete-file-version` command is deprecated. Use `rm` instead.\n', + expected_json_in_stdout=expected_json, ) def test_files_encrypted(self): - self._authorize_account() self._run_command(['bucket', 'create', 'my-bucket', 'allPublic'], 'bucket_0\n', '', 0) @@ -1263,31 +1322,30 @@ def test_files_encrypted(self): self.assertEqual(1500111222, os.path.getmtime(local_file1)) # Upload a file - expected_stdout = ''' + expected_stdout = """ URL by file name: http://download.example.com/file/my-bucket/file1.txt - URL by fileId: http://download.example.com/b2api/vx/b2_download_file_by_id?fileId=9999''' + URL by fileId: http://download.example.com/b2api/vx/b2_download_file_by_id?fileId=9999""" expected_json = { - "action": "upload", - "contentSha1": "2aae6c35c94fcfb415dbe95f408b9ce91ee846ed", - "contentType": "b2/x-auto", - "fileId": "9999", - "fileInfo": { - "src_last_modified_millis": "1500111222000" - }, - "fileName": "file1.txt", - "serverSideEncryption": { - "algorithm": "AES256", - "mode": "SSE-B2" - }, - "size": 11, - "uploadTimestamp": 5000 + 'action': 'upload', + 'contentSha1': '2aae6c35c94fcfb415dbe95f408b9ce91ee846ed', + 'contentType': 'b2/x-auto', + 'fileId': '9999', + 'fileInfo': {'src_last_modified_millis': '1500111222000'}, + 'fileName': 'file1.txt', + 'serverSideEncryption': {'algorithm': 'AES256', 'mode': 'SSE-B2'}, + 'size': 11, + 'uploadTimestamp': 5000, } self._run_command( [ - 'file', 'upload', '--no-progress', - '--destination-server-side-encryption=SSE-B2', 'my-bucket', local_file1, - 'file1.txt' + 'file', + 'upload', + '--no-progress', + '--destination-server-side-encryption=SSE-B2', + 'my-bucket', + local_file1, + 'file1.txt', ], expected_json_in_stdout=expected_json, remove_version=True, @@ -1297,22 +1355,17 @@ def test_files_encrypted(self): # Get file info mod_time_str = str(file_mod_time_millis(local_file1)) expected_json = { - "accountId": self.account_id, - "action": "upload", - "bucketId": "bucket_0", - "size": 11, - "contentSha1": "2aae6c35c94fcfb415dbe95f408b9ce91ee846ed", - "contentType": "b2/x-auto", - "fileId": "9999", - "fileInfo": { - "src_last_modified_millis": "1500111222000" - }, - "fileName": "file1.txt", - "serverSideEncryption": { - "algorithm": "AES256", - "mode": "SSE-B2" - }, - "uploadTimestamp": 5000 + 'accountId': self.account_id, + 'action': 'upload', + 'bucketId': 'bucket_0', + 'size': 11, + 'contentSha1': '2aae6c35c94fcfb415dbe95f408b9ce91ee846ed', + 
'contentType': 'b2/x-auto', + 'fileId': '9999', + 'fileInfo': {'src_last_modified_millis': '1500111222000'}, + 'fileName': 'file1.txt', + 'serverSideEncryption': {'algorithm': 'AES256', 'mode': 'SSE-B2'}, + 'uploadTimestamp': 5000, } self._run_command( @@ -1322,21 +1375,19 @@ def test_files_encrypted(self): self._run_command( ['file-info', 'b2id://9999'], - expected_stderr= - 'WARNING: `file-info` command is deprecated. Use `file info` instead.\n', + expected_stderr='WARNING: `file-info` command is deprecated. Use `file info` instead.\n', expected_json_in_stdout=expected_json, ) self._run_command( ['get-file-info', '9999'], - expected_stderr= - 'WARNING: `get-file-info` command is deprecated. Use `file info` instead.\n', + expected_stderr='WARNING: `get-file-info` command is deprecated. Use `file info` instead.\n', expected_json_in_stdout=expected_json, ) # Download by name local_download1 = os.path.join(temp_dir, 'download1.txt') - expected_stdout_template = ''' + expected_stdout_template = """ File name: file1.txt File id: 9999 Output file path: {output_path} @@ -1349,14 +1400,16 @@ def test_files_encrypted(self): INFO src_last_modified_millis: 1500111222000 Checksum matches Download finished - ''' + """ expected_stdout = expected_stdout_template.format( output_path=pathlib.Path(local_download1).resolve() ) self._run_command( ['file', 'download', '--no-progress', 'b2://my-bucket/file1.txt', local_download1], - expected_stdout, '', 0 + expected_stdout, + '', + 0, ) self.assertEqual(b'hello world', self._read_file(local_download1)) self.assertEqual(mod_time, int(round(os.path.getmtime(local_download1)))) @@ -1368,22 +1421,22 @@ def test_files_encrypted(self): ) self._run_command( ['file', 'download', '--no-progress', 'b2id://9999', local_download2], - expected_stdout, '', 0 + expected_stdout, + '', + 0, ) self.assertEqual(b'hello world', self._read_file(local_download2)) # Hide the file expected_json = { - "action": "hide", - "contentSha1": "none", - "fileId": "9998", - "fileInfo": {}, - "fileName": "file1.txt", - "serverSideEncryption": { - "mode": "none" - }, - "size": 0, - "uploadTimestamp": 5001 + 'action': 'hide', + 'contentSha1': 'none', + 'fileId': '9998', + 'fileInfo': {}, + 'fileName': 'file1.txt', + 'serverSideEncryption': {'mode': 'none'}, + 'size': 0, + 'uploadTimestamp': 5001, } self._run_command( @@ -1394,32 +1447,26 @@ def test_files_encrypted(self): # List the file versions expected_json = [ { - "action": "hide", - "contentSha1": "none", - "fileId": "9998", - "fileInfo": {}, - "fileName": "file1.txt", - "serverSideEncryption": { - "mode": "none" - }, - "size": 0, - "uploadTimestamp": 5001 - }, { - "action": "upload", - "contentSha1": "2aae6c35c94fcfb415dbe95f408b9ce91ee846ed", - "contentType": "b2/x-auto", - "fileId": "9999", - "fileInfo": { - "src_last_modified_millis": str(mod_time_str) - }, - "fileName": "file1.txt", - "serverSideEncryption": { - "algorithm": "AES256", - "mode": "SSE-B2" - }, - "size": 11, - "uploadTimestamp": 5000 - } + 'action': 'hide', + 'contentSha1': 'none', + 'fileId': '9998', + 'fileInfo': {}, + 'fileName': 'file1.txt', + 'serverSideEncryption': {'mode': 'none'}, + 'size': 0, + 'uploadTimestamp': 5001, + }, + { + 'action': 'upload', + 'contentSha1': '2aae6c35c94fcfb415dbe95f408b9ce91ee846ed', + 'contentType': 'b2/x-auto', + 'fileId': '9999', + 'fileInfo': {'src_last_modified_millis': str(mod_time_str)}, + 'fileName': 'file1.txt', + 'serverSideEncryption': {'algorithm': 'AES256', 'mode': 'SSE-B2'}, + 'size': 11, + 'uploadTimestamp': 5000, + }, ] 
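# A minimal illustrative sketch (not part of this patch or the b2 test
# suite) of why the expected_json_in_stdout checks in these tests survive
# a pure reformat: the assertion compares parsed JSON, so quoting style,
# key order, and indentation in the source literal do not matter.
# `json_matches` and `actual_stdout` are hypothetical names used only here.
import json

def json_matches(actual_stdout: str, expected) -> bool:
    # json.loads yields plain dicts/lists; == then compares structure and
    # values, ignoring formatting differences in the captured output
    return json.loads(actual_stdout) == expected

# e.g. json_matches('{"mode": "none"}', {'mode': 'none'}) is True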
self._run_command( @@ -1428,31 +1475,29 @@ def test_files_encrypted(self): ) # List the file names - expected_stdout = ''' + expected_stdout = """ [] - ''' + """ self._run_command( ['ls', '--json', *self.b2_uri_args('my-bucket')], expected_stdout, '', 0 ) # Delete one file version, passing the name in - expected_json = {"action": "delete", "fileId": "9998", "fileName": "file1.txt"} + expected_json = {'action': 'delete', 'fileId': '9998', 'fileName': 'file1.txt'} self._run_command( ['delete-file-version', 'file1.txt', '9998'], - expected_stderr= - 'WARNING: `delete-file-version` command is deprecated. Use `rm` instead.\n', - expected_json_in_stdout=expected_json + expected_stderr='WARNING: `delete-file-version` command is deprecated. Use `rm` instead.\n', + expected_json_in_stdout=expected_json, ) # Delete one file version, not passing the name in - expected_json = {"action": "delete", "fileId": "9999", "fileName": "file1.txt"} + expected_json = {'action': 'delete', 'fileId': '9999', 'fileName': 'file1.txt'} self._run_command( ['delete-file-version', '9999'], - expected_stderr= - 'WARNING: `delete-file-version` command is deprecated. Use `rm` instead.\n', + expected_stderr='WARNING: `delete-file-version` command is deprecated. Use `rm` instead.\n', expected_json_in_stdout=expected_json, ) @@ -1486,8 +1531,7 @@ def _test_download_to_directory(self, download_by: str): command += [target_directory] self._run_command(command) self.assertEqual( - local_file_content, - self._read_file(os.path.join(target_directory, source_filename)) + local_file_content, self._read_file(os.path.join(target_directory, source_filename)) ) # Download the file second time, to check the override behavior. @@ -1512,7 +1556,9 @@ def test_get_download_auth_defaults(self): self._create_my_bucket() self._run_command( ['bucket', 'get-download-auth', 'my-bucket'], - 'fake_download_auth_token_bucket_0__86400\n', '', 0 + 'fake_download_auth_token_bucket_0__86400\n', + '', + 0, ) def test_get_download_auth_explicit(self): @@ -1520,9 +1566,17 @@ def test_get_download_auth_explicit(self): self._create_my_bucket() self._run_command( [ - 'bucket', 'get-download-auth', '--prefix', 'prefix', '--duration', '12345', - 'my-bucket' - ], 'fake_download_auth_token_bucket_0_prefix_12345\n', '', 0 + 'bucket', + 'get-download-auth', + '--prefix', + 'prefix', + '--duration', + '12345', + 'my-bucket', + ], + 'fake_download_auth_token_bucket_0_prefix_12345\n', + '', + 0, ) def test_get_download_auth_url(self): @@ -1532,12 +1586,13 @@ def test_get_download_auth_url(self): ['get-download-url-with-auth', '--duration', '12345', 'my-bucket', 'my-file'], 'http://download.example.com/file/my-bucket/my-file?Authorization=fake_download_auth_token_bucket_0_my-file_12345\n', 'WARNING: `get-download-url-with-auth` command is deprecated. Use `file url` instead.\n', - 0 + 0, ) self._run_command( ['file', 'url', '--with-auth', '--duration', '12345', 'b2://my-bucket/my-file'], 'http://download.example.com/file/my-bucket/my-file?Authorization=fake_download_auth_token_bucket_0_my-file_12345\n', - '', 0 + '', + 0, ) def test_get_download_auth_url_with_encoding(self): @@ -1547,21 +1602,23 @@ def test_get_download_auth_url_with_encoding(self): ['get-download-url-with-auth', '--duration', '12345', 'my-bucket', '\u81ea'], 'http://download.example.com/file/my-bucket/%E8%87%AA?Authorization=fake_download_auth_token_bucket_0_%E8%87%AA_12345\n', 'WARNING: `get-download-url-with-auth` command is deprecated. 
Use `file url` instead.\n', - 0 + 0, ) self._run_command( ['file', 'url', '--with-auth', '--duration', '12345', 'b2://my-bucket/\u81ea'], 'http://download.example.com/file/my-bucket/%E8%87%AA?Authorization=fake_download_auth_token_bucket_0_%E8%87%AA_12345\n', - '', 0 + '', + 0, ) def test_list_unfinished_large_files_with_none(self): self._authorize_account() self._create_my_bucket() self._run_command( - ['list-unfinished-large-files', 'my-bucket'], '', + ['list-unfinished-large-files', 'my-bucket'], + '', 'WARNING: `list-unfinished-large-files` command is deprecated. Use `file large unfinished list` instead.\n', - 0 + 0, ) def test_upload_large_file(self): @@ -1576,31 +1633,34 @@ def test_upload_large_file(self): with open(file_path, 'wb') as f: f.write(text.encode('utf-8')) mod_time_str = str(file_mod_time_millis(file_path)) - expected_stdout = ''' + expected_stdout = """ URL by file name: http://download.example.com/file/my-bucket/test.txt - URL by fileId: http://download.example.com/b2api/vx/b2_download_file_by_id?fileId=9999''' + URL by fileId: http://download.example.com/b2api/vx/b2_download_file_by_id?fileId=9999""" expected_json = { - "action": "upload", - "contentSha1": "none", - "contentType": "b2/x-auto", - "fileId": "9999", - "fileInfo": - { - "large_file_sha1": "cc8954ec25e0c564b6a693fb22200e4f832c18e8", - "src_last_modified_millis": str(mod_time_str) - }, - "fileName": "test.txt", - "serverSideEncryption": { - "mode": "none" + 'action': 'upload', + 'contentSha1': 'none', + 'contentType': 'b2/x-auto', + 'fileId': '9999', + 'fileInfo': { + 'large_file_sha1': 'cc8954ec25e0c564b6a693fb22200e4f832c18e8', + 'src_last_modified_millis': str(mod_time_str), }, - "size": 600, - "uploadTimestamp": 5000 + 'fileName': 'test.txt', + 'serverSideEncryption': {'mode': 'none'}, + 'size': 600, + 'uploadTimestamp': 5000, } self._run_command( [ - 'file', 'upload', '--no-progress', '--threads', '5', 'my-bucket', file_path, - 'test.txt' + 'file', + 'upload', + '--no-progress', + '--threads', + '5', + 'my-bucket', + file_path, + 'test.txt', ], expected_json_in_stdout=expected_json, remove_version=True, @@ -1619,33 +1679,35 @@ def test_upload_large_file_encrypted(self): with open(file_path, 'wb') as f: f.write(text.encode('utf-8')) mod_time_str = str(file_mod_time_millis(file_path)) - expected_stdout = ''' + expected_stdout = """ URL by file name: http://download.example.com/file/my-bucket/test.txt - URL by fileId: http://download.example.com/b2api/vx/b2_download_file_by_id?fileId=9999''' + URL by fileId: http://download.example.com/b2api/vx/b2_download_file_by_id?fileId=9999""" expected_json = { - "action": "upload", - "contentSha1": "none", - "contentType": "b2/x-auto", - "fileId": "9999", - "fileInfo": - { - "large_file_sha1": "cc8954ec25e0c564b6a693fb22200e4f832c18e8", - "src_last_modified_millis": str(mod_time_str) - }, - "fileName": "test.txt", - "serverSideEncryption": { - "algorithm": "AES256", - "mode": "SSE-B2" + 'action': 'upload', + 'contentSha1': 'none', + 'contentType': 'b2/x-auto', + 'fileId': '9999', + 'fileInfo': { + 'large_file_sha1': 'cc8954ec25e0c564b6a693fb22200e4f832c18e8', + 'src_last_modified_millis': str(mod_time_str), }, - "size": 600, - "uploadTimestamp": 5000 + 'fileName': 'test.txt', + 'serverSideEncryption': {'algorithm': 'AES256', 'mode': 'SSE-B2'}, + 'size': 600, + 'uploadTimestamp': 5000, } self._run_command( [ - 'file', 'upload', '--no-progress', - '--destination-server-side-encryption=SSE-B2', '--threads', '5', 'my-bucket', - file_path, 'test.txt' + 'file', + 
'upload', + '--no-progress', + '--destination-server-side-encryption=SSE-B2', + '--threads', + '5', + 'my-bucket', + file_path, + 'test.txt', ], expected_json_in_stdout=expected_json, remove_version=True, @@ -1695,21 +1757,21 @@ def test_upload_incremental(self): def test_get_account_info(self): self._authorize_account() expected_json = { - "accountAuthToken": "auth_token_0", - "accountFilePath": getattr(self.account_info, 'filename', - None), # missing in StubAccountInfo in tests - "accountId": self.account_id, - "allowed": - { - "bucketId": None, - "bucketName": None, - "capabilities": sorted(ALL_CAPABILITIES), - "namePrefix": None - }, - "apiUrl": "http://api.example.com", - "applicationKey": self.master_key, - "downloadUrl": "http://download.example.com", - "s3endpoint": "http://s3.api.example.com", + 'accountAuthToken': 'auth_token_0', + 'accountFilePath': getattr( + self.account_info, 'filename', None + ), # missing in StubAccountInfo in tests + 'accountId': self.account_id, + 'allowed': { + 'bucketId': None, + 'bucketName': None, + 'capabilities': sorted(ALL_CAPABILITIES), + 'namePrefix': None, + }, + 'apiUrl': 'http://api.example.com', + 'applicationKey': self.master_key, + 'downloadUrl': 'http://download.example.com', + 's3endpoint': 'http://s3.api.example.com', } self._run_command( ['account', 'get'], @@ -1719,26 +1781,23 @@ def test_get_account_info(self): self._run_command( ['get-account-info'], expected_json_in_stdout=expected_json, - expected_stderr= - 'WARNING: `get-account-info` command is deprecated. Use `account get` instead.\n', + expected_stderr='WARNING: `get-account-info` command is deprecated. Use `account get` instead.\n', ) def test_get_bucket(self): self._authorize_account() self._create_my_bucket() expected_json = { - "accountId": self.account_id, - "bucketId": "bucket_0", - "bucketInfo": {}, - "bucketName": "my-bucket", - "bucketType": "allPublic", - "corsRules": [], - "defaultServerSideEncryption": { - "mode": "none" - }, - "lifecycleRules": [], - "options": [], - "revision": 1 + 'accountId': self.account_id, + 'bucketId': 'bucket_0', + 'bucketInfo': {}, + 'bucketName': 'my-bucket', + 'bucketType': 'allPublic', + 'corsRules': [], + 'defaultServerSideEncryption': {'mode': 'none'}, + 'lifecycleRules': [], + 'options': [], + 'revision': 1, } self._run_command( ['bucket', 'get', 'my-bucket'], @@ -1749,20 +1808,18 @@ def test_get_bucket_empty_show_size(self): self._authorize_account() self._create_my_bucket() expected_json = { - "accountId": self.account_id, - "bucketId": "bucket_0", - "bucketInfo": {}, - "bucketName": "my-bucket", - "bucketType": "allPublic", - "corsRules": [], - "defaultServerSideEncryption": { - "mode": "none" - }, - "fileCount": 0, - "lifecycleRules": [], - "options": [], - "revision": 1, - "totalSize": 0 + 'accountId': self.account_id, + 'bucketId': 'bucket_0', + 'bucketInfo': {}, + 'bucketName': 'my-bucket', + 'bucketType': 'allPublic', + 'corsRules': [], + 'defaultServerSideEncryption': {'mode': 'none'}, + 'fileCount': 0, + 'lifecycleRules': [], + 'options': [], + 'revision': 1, + 'totalSize': 0, } self._run_command( ['bucket', 'get', '--show-size', 'my-bucket'], @@ -1776,23 +1833,19 @@ def test_get_bucket_one_item_show_size(self): # Upload a standard test file. 
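# (the standard helper writes the 11-byte payload b'hello world', whose
# SHA1 is the 2aae6c35... digest asserted below, and pins the mtime that
# src_last_modified_millis is derived from)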
local_file1 = self._make_local_file(temp_dir, 'file1.txt') mod_time_str = str(file_mod_time_millis(local_file1)) - expected_stdout = ''' + expected_stdout = """ URL by file name: http://download.example.com/file/my-bucket/file1.txt - URL by fileId: http://download.example.com/b2api/vx/b2_download_file_by_id?fileId=9999''' + URL by fileId: http://download.example.com/b2api/vx/b2_download_file_by_id?fileId=9999""" expected_json = { - "action": "upload", - "contentSha1": "2aae6c35c94fcfb415dbe95f408b9ce91ee846ed", - "contentType": "b2/x-auto", - "fileId": "9999", - "fileInfo": { - "src_last_modified_millis": str(mod_time_str) - }, - "fileName": "file1.txt", - "serverSideEncryption": { - "mode": "none" - }, - "size": 11, - "uploadTimestamp": 5000 + 'action': 'upload', + 'contentSha1': '2aae6c35c94fcfb415dbe95f408b9ce91ee846ed', + 'contentType': 'b2/x-auto', + 'fileId': '9999', + 'fileInfo': {'src_last_modified_millis': str(mod_time_str)}, + 'fileName': 'file1.txt', + 'serverSideEncryption': {'mode': 'none'}, + 'size': 11, + 'uploadTimestamp': 5000, } self._run_command( ['file', 'upload', '--no-progress', 'my-bucket', local_file1, 'file1.txt'], @@ -1803,20 +1856,18 @@ def test_get_bucket_one_item_show_size(self): # Now check the output of `bucket get` against the canon. expected_json = { - "accountId": self.account_id, - "bucketId": "bucket_0", - "bucketInfo": {}, - "bucketName": "my-bucket", - "bucketType": "allPublic", - "corsRules": [], - "defaultServerSideEncryption": { - "mode": "none" - }, - "fileCount": 1, - "lifecycleRules": [], - "options": [], - "revision": 1, - "totalSize": 11 + 'accountId': self.account_id, + 'bucketId': 'bucket_0', + 'bucketInfo': {}, + 'bucketName': 'my-bucket', + 'bucketType': 'allPublic', + 'corsRules': [], + 'defaultServerSideEncryption': {'mode': 'none'}, + 'fileCount': 1, + 'lifecycleRules': [], + 'options': [], + 'revision': 1, + 'totalSize': 11, } self._run_command( ['bucket', 'get', '--show-size', 'my-bucket'], @@ -1842,20 +1893,18 @@ def test_get_bucket_with_versions(self): # Now check the output of `bucket get` against the canon. expected_json = { - "accountId": self.account_id, - "bucketId": "bucket_0", - "bucketInfo": {}, - "bucketName": "my-bucket", - "bucketType": "allPublic", - "corsRules": [], - "defaultServerSideEncryption": { - "mode": "none" - }, - "fileCount": 10, - "lifecycleRules": [], - "options": [], - "revision": 1, - "totalSize": 40 + 'accountId': self.account_id, + 'bucketId': 'bucket_0', + 'bucketInfo': {}, + 'bucketName': 'my-bucket', + 'bucketType': 'allPublic', + 'corsRules': [], + 'defaultServerSideEncryption': {'mode': 'none'}, + 'fileCount': 10, + 'lifecycleRules': [], + 'options': [], + 'revision': 1, + 'totalSize': 40, } self._run_command( ['bucket', 'get', '--show-size', 'my-bucket'], @@ -1892,20 +1941,18 @@ def test_get_bucket_with_folders(self): # Now check the output of `bucket get` against the canon. 
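# (--show-size extends the bucket JSON with fileCount and totalSize;
# for this folder layout the canon below expects 20 file versions
# totalling 90 bytes)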
expected_json = { - "accountId": self.account_id, - "bucketId": "bucket_0", - "bucketInfo": {}, - "bucketName": "my-bucket", - "bucketType": "allPublic", - "corsRules": [], - "defaultServerSideEncryption": { - "mode": "none" - }, - "fileCount": 20, - "lifecycleRules": [], - "options": [], - "revision": 1, - "totalSize": 90 + 'accountId': self.account_id, + 'bucketId': 'bucket_0', + 'bucketInfo': {}, + 'bucketName': 'my-bucket', + 'bucketType': 'allPublic', + 'corsRules': [], + 'defaultServerSideEncryption': {'mode': 'none'}, + 'fileCount': 20, + 'lifecycleRules': [], + 'options': [], + 'revision': 1, + 'totalSize': 90, } self._run_command( ['bucket', 'get', '--show-size', 'my-bucket'], @@ -1939,20 +1986,18 @@ def test_get_bucket_with_hidden(self): # Now check the output of `bucket get` against the canon. expected_json = { - "accountId": self.account_id, - "bucketId": "bucket_0", - "bucketInfo": {}, - "bucketName": "my-bucket", - "bucketType": "allPublic", - "corsRules": [], - "defaultServerSideEncryption": { - "mode": "none" - }, - "fileCount": 9, - "lifecycleRules": [], - "options": [], - "revision": 1, - "totalSize": 24 + 'accountId': self.account_id, + 'bucketId': 'bucket_0', + 'bucketInfo': {}, + 'bucketName': 'my-bucket', + 'bucketType': 'allPublic', + 'corsRules': [], + 'defaultServerSideEncryption': {'mode': 'none'}, + 'fileCount': 9, + 'lifecycleRules': [], + 'options': [], + 'revision': 1, + 'totalSize': 24, } self._run_command( ['bucket', 'get', '--show-size', 'my-bucket'], @@ -2005,20 +2050,18 @@ def test_get_bucket_complex(self): # Now check the output of `bucket get` against the canon. expected_json = { - "accountId": self.account_id, - "bucketId": "bucket_0", - "bucketInfo": {}, - "bucketName": "my-bucket", - "bucketType": "allPublic", - "corsRules": [], - "defaultServerSideEncryption": { - "mode": "none" - }, - "fileCount": 28, - "lifecycleRules": [], - "options": [], - "revision": 1, - "totalSize": 99 + 'accountId': self.account_id, + 'bucketId': 'bucket_0', + 'bucketInfo': {}, + 'bucketName': 'my-bucket', + 'bucketType': 'allPublic', + 'corsRules': [], + 'defaultServerSideEncryption': {'mode': 'none'}, + 'fileCount': 28, + 'lifecycleRules': [], + 'options': [], + 'revision': 1, + 'totalSize': 99, } self._run_command( ['bucket', 'get', '--show-size', 'my-bucket'], @@ -2029,26 +2072,30 @@ def test_get_bucket_encrypted(self): self._authorize_account() self._run_command( [ - 'bucket', 'create', '--default-server-side-encryption=SSE-B2', - '--default-server-side-encryption-algorithm=AES256', 'my-bucket', 'allPublic' - ], 'bucket_0\n', '', 0 + 'bucket', + 'create', + '--default-server-side-encryption=SSE-B2', + '--default-server-side-encryption-algorithm=AES256', + 'my-bucket', + 'allPublic', + ], + 'bucket_0\n', + '', + 0, ) expected_json = { - "accountId": self.account_id, - "bucketId": "bucket_0", - "bucketInfo": {}, - "bucketName": "my-bucket", - "bucketType": "allPublic", - "corsRules": [], - "defaultServerSideEncryption": { - "algorithm": "AES256", - "mode": "SSE-B2" - }, - "fileCount": 0, - "lifecycleRules": [], - "options": [], - "revision": 1, - "totalSize": 0 + 'accountId': self.account_id, + 'bucketId': 'bucket_0', + 'bucketInfo': {}, + 'bucketName': 'my-bucket', + 'bucketType': 'allPublic', + 'corsRules': [], + 'defaultServerSideEncryption': {'algorithm': 'AES256', 'mode': 'SSE-B2'}, + 'fileCount': 0, + 'lifecycleRules': [], + 'options': [], + 'revision': 1, + 'totalSize': 0, } self._run_command( ['bucket', 'get', '--show-size', 'my-bucket'], @@ -2063,9 +2110,9 @@ 
def test_sync(self): file_path = os.path.join(temp_dir, 'test.txt') with open(file_path, 'wb') as f: f.write(b'hello world') - expected_stdout = ''' + expected_stdout = """ upload test.txt - ''' + """ command = ['sync', '--no-progress', temp_dir, 'b2://my-bucket'] self._run_command(command, expected_stdout, '', 0) @@ -2075,8 +2122,9 @@ def test_sync_empty_folder_when_not_enabled(self): self._create_my_bucket() with TempDir() as temp_dir: command = ['sync', '--no-progress', temp_dir, 'b2://my-bucket'] - expected_stderr = 'ERROR: Directory %s is empty. Use --allow-empty-source to sync anyway.\n' % fix_windows_path_limit( - temp_dir.replace('\\\\', '\\') + expected_stderr = ( + 'ERROR: Directory %s is empty. Use --allow-empty-source to sync anyway.\n' + % fix_windows_path_limit(temp_dir.replace('\\\\', '\\')) ) self._run_command(command, '', expected_stderr, 1) @@ -2095,24 +2143,24 @@ def test_sync_dry_run(self): temp_file = self._make_local_file(temp_dir, 'test-dry-run.txt') # dry-run - expected_stdout = ''' + expected_stdout = """ upload test-dry-run.txt - ''' + """ command = ['sync', '--no-progress', '--dry-run', temp_dir, 'b2://my-bucket'] self._run_command(command, expected_stdout, '', 0) # file should not have been uploaded - expected_stdout = ''' + expected_stdout = """ [] - ''' + """ self._run_command( ['ls', '--json', *self.b2_uri_args('my-bucket')], expected_stdout, '', 0 ) # upload file - expected_stdout = ''' + expected_stdout = """ upload test-dry-run.txt - ''' + """ command = ['sync', '--no-progress', temp_dir, 'b2://my-bucket'] self._run_command(command, expected_stdout, '', 0) @@ -2120,19 +2168,15 @@ def test_sync_dry_run(self): mtime = file_mod_time_millis(temp_file) expected_json = [ { - "action": "upload", - "contentSha1": "2aae6c35c94fcfb415dbe95f408b9ce91ee846ed", - "contentType": "b2/x-auto", - "fileId": "9999", - "fileInfo": { - "src_last_modified_millis": str(mtime) - }, - "fileName": "test-dry-run.txt", - "serverSideEncryption": { - "mode": "none" - }, - "size": 11, - "uploadTimestamp": 5000 + 'action': 'upload', + 'contentSha1': '2aae6c35c94fcfb415dbe95f408b9ce91ee846ed', + 'contentType': 'b2/x-auto', + 'fileId': '9999', + 'fileInfo': {'src_last_modified_millis': str(mtime)}, + 'fileName': 'test-dry-run.txt', + 'serverSideEncryption': {'mode': 'none'}, + 'size': 11, + 'uploadTimestamp': 5000, } ] self._run_command( @@ -2147,12 +2191,16 @@ def test_sync_exclude_all_symlinks(self): with TempDir() as temp_dir: self._make_local_file(temp_dir, 'test.txt') os.symlink('test.txt', os.path.join(temp_dir, 'alink')) - expected_stdout = ''' + expected_stdout = """ upload test.txt - ''' + """ command = [ - 'sync', '--no-progress', '--exclude-all-symlinks', temp_dir, 'b2://my-bucket' + 'sync', + '--no-progress', + '--exclude-all-symlinks', + temp_dir, + 'b2://my-bucket', ] self._run_command(command, expected_stdout, '', 0) @@ -2164,9 +2212,9 @@ def test_sync_dont_exclude_all_symlinks(self): self._make_local_file(temp_dir, 'test.txt') os.symlink('test.txt', os.path.join(temp_dir, 'alink')) # Exact stdout cannot be asserted because line order is non-deterministic - expected_part_of_stdout = ''' + expected_part_of_stdout = """ upload alink - ''' + """ command = ['sync', '--no-progress', temp_dir, 'b2://my-bucket'] self._run_command(command, expected_part_of_stdout=expected_part_of_stdout) @@ -2181,13 +2229,17 @@ def test_sync_exclude_if_modified_after_in_range(self): path = os.path.join(temp_dir, file) os.utime(path, (mtime, mtime)) - expected_stdout = ''' + expected_stdout = """ 
upload test2.txt - ''' + """ command = [ - 'sync', '--no-progress', '--exclude-if-modified-after', '1367700664.152', temp_dir, - 'b2://my-bucket' + 'sync', + '--no-progress', + '--exclude-if-modified-after', + '1367700664.152', + temp_dir, + 'b2://my-bucket', ] self._run_command(command, expected_stdout, '', 0) @@ -2201,13 +2253,17 @@ def test_sync_exclude_if_modified_after_exact(self): path = os.path.join(temp_dir, file) os.utime(path, (mtime, mtime)) - expected_stdout = ''' + expected_stdout = """ upload test2.txt - ''' + """ command = [ - 'sync', '--no-progress', '--exclude-if-modified-after', '1367600664.152', temp_dir, - 'b2://my-bucket' + 'sync', + '--no-progress', + '--exclude-if-modified-after', + '1367600664.152', + temp_dir, + 'b2://my-bucket', ] self._run_command(command, expected_stdout, '', 0) @@ -2219,19 +2275,29 @@ def test_sync_exclude_if_uploaded_after_in_range(self): for file, utime in (('test.txt', 1367900664152), ('test2.txt', 1367600664152)): file_path = self._make_local_file(temp_dir, file) command = [ - 'file', 'upload', '--no-progress', '--custom-upload-timestamp', - str(utime), 'my-bucket', file_path, file + 'file', + 'upload', + '--no-progress', + '--custom-upload-timestamp', + str(utime), + 'my-bucket', + file_path, + file, ] self._run_command(command, expected_status=0) with TemporaryDirectory() as temp_dir: command = [ - 'sync', '--no-progress', '--exclude-if-uploaded-after', '1367700664.152', - 'b2://my-bucket', temp_dir + 'sync', + '--no-progress', + '--exclude-if-uploaded-after', + '1367700664.152', + 'b2://my-bucket', + temp_dir, ] - expected_stdout = ''' + expected_stdout = """ dnload test2.txt - ''' + """ self._run_command(command, expected_stdout, '', 0) def test_sync_exclude_if_uploaded_after_exact(self): @@ -2242,19 +2308,29 @@ def test_sync_exclude_if_uploaded_after_exact(self): for file, utime in (('test.txt', 1367900664152), ('test2.txt', 1367600664152)): file_path = self._make_local_file(temp_dir, file) command = [ - 'file', 'upload', '--no-progress', '--custom-upload-timestamp', - str(utime), 'my-bucket', file_path, file + 'file', + 'upload', + '--no-progress', + '--custom-upload-timestamp', + str(utime), + 'my-bucket', + file_path, + file, ] self._run_command(command, expected_status=0) with TemporaryDirectory() as temp_dir: command = [ - 'sync', '--no-progress', '--exclude-if-uploaded-after', '1367600664.152', - 'b2://my-bucket', temp_dir + 'sync', + '--no-progress', + '--exclude-if-uploaded-after', + '1367600664.152', + 'b2://my-bucket', + temp_dir, ] - expected_stdout = ''' + expected_stdout = """ dnload test2.txt - ''' + """ self._run_command(command, expected_stdout, '', 0) def _test_sync_threads( @@ -2279,9 +2355,9 @@ def _test_sync_threads( if upload_threads is not None: command += ['--upload-threads', str(upload_threads)] command += [temp_dir, 'b2://my-bucket'] - expected_stdout = ''' + expected_stdout = """ upload file.txt - ''' + """ self._run_command(command, expected_stdout) def test_sync_threads(self): @@ -2335,40 +2411,40 @@ def test_ls(self): bucket.upload(UploadSourceBytes(b' '), 'c') # Condensed output - expected_stdout = ''' + expected_stdout = """ a b/ c - ''' + """ self._run_command(['ls', *self.b2_uri_args('my-bucket')], expected_stdout, '', 0) # Recursive output - expected_stdout = ''' + expected_stdout = """ a b/b1 b/b2 c - ''' + """ self._run_command( ['ls', '--recursive', *self.b2_uri_args('my-bucket')], expected_stdout, '', 0 ) self._run_command(['ls', '-r', *self.b2_uri_args('my-bucket')], expected_stdout, '', 0) # 
Check long output. (The format expects full-length file ids, so it causes whitespace here) - expected_stdout = ''' + expected_stdout = """ 9999 upload 1970-01-01 00:00:05 0 a - - - - 0 b/ 9995 upload 1970-01-01 00:00:05 6 c - ''' + """ self._run_command(['ls', '--long', *self.b2_uri_args('my-bucket')], expected_stdout, '', 0) # Check long versions output (The format expects full-length file ids, so it causes whitespace here) - expected_stdout = ''' + expected_stdout = """ 9999 upload 1970-01-01 00:00:05 0 a - - - - 0 b/ 9995 upload 1970-01-01 00:00:05 6 c 9996 upload 1970-01-01 00:00:05 5 c - ''' + """ self._run_command( ['ls', '--long', '--versions', *self.b2_uri_args('my-bucket')], expected_stdout, '', 0 ) @@ -2379,15 +2455,17 @@ def test_ls_wildcard(self): # Check with no files self._run_command( - ['ls', '--recursive', '--with-wildcard', *self.b2_uri_args('my-bucket', '*.txt')], '', - '', 0 + ['ls', '--recursive', '--with-wildcard', *self.b2_uri_args('my-bucket', '*.txt')], + '', + '', + 0, ) # Create some files, including files in a folder bucket = self.b2_api.get_bucket_by_name('my-bucket') self._upload_multiple_files(bucket) - expected_stdout = ''' + expected_stdout = """ a/test.csv a/test.tsv b/b/test.csv @@ -2395,52 +2473,56 @@ def test_ls_wildcard(self): b/b2/test.tsv c/test.csv c/test.tsv - ''' + """ self._run_command( ['ls', '--recursive', '--with-wildcard', *self.b2_uri_args('my-bucket', '*.[tc]sv')], expected_stdout, ) - expected_stdout = ''' + expected_stdout = """ a/test.tsv b/b2/test.tsv c/test.tsv - ''' + """ self._run_command( ['ls', '--recursive', '--with-wildcard', *self.b2_uri_args('my-bucket', '*.tsv')], expected_stdout, ) - expected_stdout = ''' + expected_stdout = """ b/b1/test.csv - ''' + """ self._run_command( [ - 'ls', '--recursive', '--with-wildcard', - *self.b2_uri_args('my-bucket', 'b/b?/test.csv') + 'ls', + '--recursive', + '--with-wildcard', + *self.b2_uri_args('my-bucket', 'b/b?/test.csv'), ], expected_stdout, ) - expected_stdout = ''' + expected_stdout = """ a/test.csv a/test.tsv c/test.csv c/test.tsv - ''' + """ self._run_command( ['ls', '--recursive', '--with-wildcard', *self.b2_uri_args('my-bucket', '?/test.?sv')], expected_stdout, ) - expected_stdout = ''' + expected_stdout = """ b/b/test.csv b/b1/test.csv - ''' + """ self._run_command( [ - 'ls', '--recursive', '--with-wildcard', - *self.b2_uri_args('my-bucket', '?/*/*.[!t]sv') + 'ls', + '--recursive', + '--with-wildcard', + *self.b2_uri_args('my-bucket', '?/*/*.[!t]sv'), ], expected_stdout, ) @@ -2476,12 +2558,18 @@ def test_restrictions(self): # Create a key restricted to a bucket app_key_id = 'appKeyId0' app_key = 'appKey0' - capabilities = "listBuckets,readFiles" + capabilities = 'listBuckets,readFiles' file_prefix = 'some/file/prefix/' self._run_command( [ - 'key', 'create', '--bucket', bucket_name, '--name-prefix', file_prefix, 'my-key', - capabilities + 'key', + 'create', + '--bucket', + bucket_name, + '--name-prefix', + file_prefix, + 'my-key', + capabilities, ], app_key_id + ' ' + app_key + '\n', '', @@ -2508,10 +2596,12 @@ def test_restrictions(self): ) # Test that the application key info gets added to the unauthorized error message. 
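# (the CLI enriches the server's generic `unauthorized` error with the
# key's capabilities, bucket restriction, and name prefix, as the
# expected stderr below shows)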
- expected_create_key_stderr = "ERROR: unauthorized for application key " \ - "with capabilities 'listBuckets,readFiles', " \ - "restricted to bucket 'restrictedBucket', " \ - "restricted to files that start with 'some/file/prefix/' (unauthorized)\n" + expected_create_key_stderr = ( + 'ERROR: unauthorized for application key ' + "with capabilities 'listBuckets,readFiles', " + "restricted to bucket 'restrictedBucket', " + "restricted to files that start with 'some/file/prefix/' (unauthorized)\n" + ) self._run_command( ['key', 'create', 'goodKeyName-One', 'readFiles,listBuckets'], '', @@ -2591,7 +2681,9 @@ def test_ls_for_restricted_bucket(self): ) # Authorize with the key and list the files - self._run_command_ignore_output(['account', 'authorize', 'appKeyId0', 'appKey0'],) + self._run_command_ignore_output( + ['account', 'authorize', 'appKeyId0', 'appKey0'], + ) self._run_command( ['ls', *self.b2_uri_args('my-bucket')], '', @@ -2604,7 +2696,8 @@ def test_bad_terminal(self): stdout.write = mock.MagicMock( side_effect=[ UnicodeEncodeError('codec', 'foo', 100, 105, 'artificial UnicodeEncodeError') - ] + list(range(25)) + ] + + list(range(25)) ) stderr = mock.MagicMock() console_tool = self.console_tool_class(stdout, stderr) @@ -2614,12 +2707,21 @@ def test_passing_api_parameters(self): self._authorize_account() commands = [ [ - 'b2', 'download-file-by-name', '--profile', 'nonexistent', 'dummy-name', - 'dummy-file-name', 'dummy-local-file-name' + 'b2', + 'download-file-by-name', + '--profile', + 'nonexistent', + 'dummy-name', + 'dummy-file-name', + 'dummy-local-file-name', ], [ - 'b2', 'download-file-by-id', '--profile', 'nonexistent', 'dummy-id', - 'dummy-local-file-name' + 'b2', + 'download-file-by-id', + '--profile', + 'nonexistent', + 'dummy-id', + 'dummy-local-file-name', ], ['b2', 'sync', '--profile', 'nonexistent', 'b2:dummy-source', 'dummy-destination'], ] @@ -2648,13 +2750,13 @@ def test_passing_api_parameters(self): assert download_manager.check_hash is ('--skip-hash-verification' not in params) parallel_strategy = one( - strategy for strategy in download_manager.strategies + strategy + for strategy in download_manager.strategies if isinstance(strategy, download_manager.PARALLEL_DOWNLOADER_CLASS) ) assert parallel_strategy.max_streams == params['--max-download-streams-per-file'] def test_passing_api_parameters_with_auth_env_vars(self): - os.environ[B2_APPLICATION_KEY_ID_ENV_VAR] = self.account_id os.environ[B2_APPLICATION_KEY_ENV_VAR] = self.master_key @@ -2681,7 +2783,8 @@ def test_passing_api_parameters_with_auth_env_vars(self): assert download_manager.check_hash is False parallel_strategy = one( - strategy for strategy in download_manager.strategies + strategy + for strategy in download_manager.strategies if isinstance(strategy, download_manager.PARALLEL_DOWNLOADER_CLASS) ) assert parallel_strategy.max_streams == 5 @@ -2696,9 +2799,9 @@ def test_ls_b2id(self): file_version = bucket.upload(UploadSourceBytes(b''), 'test.txt') # Condensed output - expected_stdout = ''' + expected_stdout = """ test.txt - ''' + """ self._run_command(['ls', f'b2id://{file_version.id_}'], expected_stdout, '', 0) def test_ls_filters(self): @@ -2716,12 +2819,12 @@ def test_ls_filters(self): bucket.upload(data, 'test.csv') bucket.upload(data, 'test.tsv') - expected_stdout = ''' + expected_stdout = """ a/ b/ c/ test.csv - ''' + """ self._run_command( ['ls', *self.b2_uri_args('my-bucket'), '--include', '*.csv'], expected_stdout, @@ -2731,12 +2834,12 @@ def test_ls_filters(self): expected_stdout, ) - 
expected_stdout = ''' + expected_stdout = """ a/test.csv b/b/test.csv c/test.csv test.csv - ''' + """ self._run_command( ['ls', *self.b2_uri_args('my-bucket'), '--recursive', '--include', '*.csv'], expected_stdout, @@ -2746,20 +2849,27 @@ def test_ls_filters(self): expected_stdout, ) - expected_stdout = ''' + expected_stdout = """ b/b/test.csv c/test.csv test.csv - ''' + """ self._run_command( [ - 'ls', *self.b2_uri_args('my-bucket'), '--recursive', '--exclude', '*', '--include', - '*.csv', '--exclude', 'a/*' + 'ls', + *self.b2_uri_args('my-bucket'), + '--recursive', + '--exclude', + '*', + '--include', + '*.csv', + '--exclude', + 'a/*', ], expected_stdout, ) - @pytest.mark.skip("temporarily disabled") + @pytest.mark.skip('temporarily disabled') @skip_on_windows def test_escape_c0_char_on_sync_stack_trace(self): self._authorize_account() @@ -2767,19 +2877,25 @@ def test_escape_c0_char_on_sync_stack_trace(self): self._run_command(['bucket', 'create', 'my-bucket-1', 'allPrivate'], 'bucket_1\n', '', 0) with TempDir() as temp_dir: - _ = self._make_local_file(temp_dir, "\x1b[32mC\x1b[33mC\x1b[34mI\x1b[0m") + _ = self._make_local_file(temp_dir, '\x1b[32mC\x1b[33mC\x1b[34mI\x1b[0m') self._run_command( [ - 'sync', '--no-progress', '--no-escape-control-characters', temp_dir, - 'b2://my-bucket-0' + 'sync', + '--no-progress', + '--no-escape-control-characters', + temp_dir, + 'b2://my-bucket-0', ], expected_part_of_stdout='\\x1b[32m', expected_status=0, ) self._run_command( [ - 'sync', '--no-progress', '--escape-control-characters', temp_dir, - 'b2://my-bucket-1' + 'sync', + '--no-progress', + '--escape-control-characters', + temp_dir, + 'b2://my-bucket-1', ], expected_part_of_stdout="upload '\\x1b[32mC\\x1b[33mC\\x1b[34mI\\x1b[0m'\n", expected_status=0, @@ -2795,8 +2911,14 @@ def test_escape_c0_char_on_key_restricted_path(self): # Create a key self._run_command( [ - 'key', 'create', '--bucket', 'my-bucket-0', '--name-prefix', cc_name, 'key1', - 'listBuckets,listKeys' + 'key', + 'create', + '--bucket', + 'my-bucket-0', + '--name-prefix', + cc_name, + 'key1', + 'listBuckets,listKeys', ], 'appKeyId0 appKey0\n', expected_status=0, @@ -2808,19 +2930,19 @@ def test_escape_c0_char_on_key_restricted_path(self): self._run_command( ['ls', *self.b2_uri_args('my-bucket-0'), '--no-escape-control-characters'], expected_status=1, - expected_stderr=escaped_error + expected_stderr=escaped_error, ) self._run_command( ['ls', *self.b2_uri_args('my-bucket-0'), '--escape-control-characters'], expected_status=1, - expected_stderr=escaped_error + expected_stderr=escaped_error, ) self._run_command( ['ls', *self.b2_uri_args('my-bucket-0')], expected_status=1, - expected_stderr=escaped_error + expected_stderr=escaped_error, ) def test_escape_c1_char_on_ls_long(self): @@ -2848,7 +2970,7 @@ def test_escape_c1_char_on_ls_long(self): self._run_command( ['ls', '--long', '--escape-control-characters', *self.b2_uri_args('my-bucket-0')], expected_part_of_stdout=escaped_cc_filename, - unexpected_part_of_stdout=cc_filename + unexpected_part_of_stdout=cc_filename, ) def test_escape_c1_char_ls(self): @@ -2856,21 +2978,21 @@ def test_escape_c1_char_ls(self): self._run_command(['bucket', 'create', 'my-bucket-cc', 'allPrivate'], 'bucket_0\n', '', 0) with TempDir() as temp_dir: - local_file = self._make_local_file(temp_dir, "x") - bad_str = "\u009b2K\u009b7Gb\u009b24Gx\u009b4GH" - escaped_bad_str = "\\x9b2K\\x9b7Gb\\x9b24Gx\\x9b4GH" + local_file = self._make_local_file(temp_dir, 'x') + bad_str = '\u009b2K\u009b7Gb\u009b24Gx\u009b4GH' + 
escaped_bad_str = '\\x9b2K\\x9b7Gb\\x9b24Gx\\x9b4GH'
        self._run_command(
            ['file', 'upload', '--no-progress', 'my-bucket-cc', local_file, bad_str]
        )
        self._run_command(
-            ['file', 'upload', '--no-progress', 'my-bucket-cc', local_file, "some_normal_text"]
+            ['file', 'upload', '--no-progress', 'my-bucket-cc', local_file, 'some_normal_text']
        )
        self._run_command(
            ['ls', *self.b2_uri_args('my-bucket-cc'), '--no-escape-control-characters'],
-            expected_part_of_stdout=bad_str
+            expected_part_of_stdout=bad_str,
        )

        self._run_command(
@@ -2879,7 +3001,7 @@ def test_escape_c1_char_ls(self):

        self._run_command(
            ['ls', *self.b2_uri_args('my-bucket-cc'), '--escape-control-characters'],
-            expected_part_of_stdout=escaped_bad_str
+            expected_part_of_stdout=escaped_bad_str,
        )


@@ -2900,25 +3022,28 @@ def setUp(self):
    def test_cancel_large_file(self):
        file = self.v1_bucket.start_large_file('file1', 'text/plain', {})
        self._run_command(
-            ['file', 'large', 'unfinished', 'cancel', f'b2id://{file.file_id}'], '9999 canceled\n',
-            '', 0
+            ['file', 'large', 'unfinished', 'cancel', f'b2id://{file.file_id}'],
+            '9999 canceled\n',
+            '',
+            0,
        )

    def test_cancel_large_file_deprecated(self):
        file = self.v1_bucket.start_large_file('file1', 'text/plain', {})
        self._run_command(
-            ['cancel-large-file', file.file_id], '9999 canceled\n',
+            ['cancel-large-file', file.file_id],
+            '9999 canceled\n',
            'WARNING: `cancel-large-file` command is deprecated. Use `file large unfinished cancel` instead.\n',
-            0
+            0,
        )

    def test_cancel_all_large_file(self):
        self.v1_bucket.start_large_file('file1', 'text/plain', {})
        self.v1_bucket.start_large_file('file2', 'text/plain', {})
-        expected_stdout = '''
+        expected_stdout = """
    9999 canceled
    9998 canceled
-    '''
+    """

        self._run_command(
            ['file', 'large', 'unfinished', 'cancel', 'b2://my-v1-bucket'], expected_stdout, '', 0
@@ -2927,15 +3052,16 @@ def test_cancel_all_large_file(self):
    def test_cancel_all_large_file_deprecated(self):
        self.v1_bucket.start_large_file('file1', 'text/plain', {})
        self.v1_bucket.start_large_file('file2', 'text/plain', {})
-        expected_stdout = '''
+        expected_stdout = """
    9999 canceled
    9998 canceled
-    '''
+    """

        self._run_command(
-            ['cancel-all-unfinished-large-files', 'my-v1-bucket'], expected_stdout,
+            ['cancel-all-unfinished-large-files', 'my-v1-bucket'],
+            expected_stdout,
            'WARNING: `cancel-all-unfinished-large-files` command is deprecated. Use `file large unfinished cancel` instead.\n',
-            0
+            0,
        )

    def test_list_parts_with_none(self):
@@ -2945,8 +3071,10 @@ def test_list_parts_with_none(self):
    def test_list_parts_with_none_deprecated(self):
        file = self.v1_bucket.start_large_file('file', 'text/plain', {})
        self._run_command(
-            ['list-parts', file.file_id], '',
-            'WARNING: `list-parts` command is deprecated. Use `file large parts` instead.\n', 0
+            ['list-parts', file.file_id],
+            '',
+            'WARNING: `list-parts` command is deprecated. Use `file large parts` instead.\n',
+            0,
        )

    def test_list_parts_with_parts(self):
@@ -2956,17 +3084,27 @@ def test_list_parts_with_parts(self):
        large_file_upload_state = mock.MagicMock()
        large_file_upload_state.has_error.return_value = False
        bucket.api.services.upload_manager._upload_part(
-            bucket.id_, file.file_id, UploadSourceBytes(content), 1, large_file_upload_state, None,
-            None
+            bucket.id_,
+            file.file_id,
+            UploadSourceBytes(content),
+            1,
+            large_file_upload_state,
+            None,
+            None,
        )
        bucket.api.services.upload_manager._upload_part(
-            bucket.id_, file.file_id, UploadSourceBytes(content), 3, large_file_upload_state, None,
-            None
+            bucket.id_,
+            file.file_id,
+            UploadSourceBytes(content),
+            3,
+            large_file_upload_state,
+            None,
+            None,
        )
-        expected_stdout = '''
+        expected_stdout = """
    1         11  2aae6c35c94fcfb415dbe95f408b9ce91ee846ed
    3         11  2aae6c35c94fcfb415dbe95f408b9ce91ee846ed
-    '''
+    """

        self._run_command(
            ['file', 'large', 'parts', f'b2id://{file.file_id}'], expected_stdout, '', 0
@@ -2979,21 +3117,33 @@ def test_list_parts_with_parts_deprecated(self):
        large_file_upload_state = mock.MagicMock()
        large_file_upload_state.has_error.return_value = False
        bucket.api.services.upload_manager._upload_part(
-            bucket.id_, file.file_id, UploadSourceBytes(content), 1, large_file_upload_state, None,
-            None
+            bucket.id_,
+            file.file_id,
+            UploadSourceBytes(content),
+            1,
+            large_file_upload_state,
+            None,
+            None,
        )
        bucket.api.services.upload_manager._upload_part(
-            bucket.id_, file.file_id, UploadSourceBytes(content), 3, large_file_upload_state, None,
-            None
+            bucket.id_,
+            file.file_id,
+            UploadSourceBytes(content),
+            3,
+            large_file_upload_state,
+            None,
+            None,
        )
-        expected_stdout = '''
+        expected_stdout = """
    1         11  2aae6c35c94fcfb415dbe95f408b9ce91ee846ed
    3         11  2aae6c35c94fcfb415dbe95f408b9ce91ee846ed
-    '''
+    """

        self._run_command(
-            ['list-parts', file.file_id], expected_stdout,
-            'WARNING: `list-parts` command is deprecated. Use `file large parts` instead.\n', 0
+            ['list-parts', file.file_id],
+            expected_stdout,
+            'WARNING: `list-parts` command is deprecated. Use `file large parts` instead.\n',
+            0,
        )

    def test_list_unfinished_large_files_with_some(self):
@@ -3006,11 +3156,11 @@ def test_list_unfinished_large_files_with_some(self):
        self.raw_api.start_large_file(
            api_url, auth_token, 'bucket_0', 'file3', 'application/json', {}
        )
-        expected_stdout = '''
+        expected_stdout = """
    9999 file1 text/plain
    9998 file2 text/plain color=blue
    9997 file3 application/json
-    '''
+    """

        self._run_command(
            ['file', 'large', 'unfinished', 'list', 'b2://my-bucket'], expected_stdout, '', 0
@@ -3026,16 +3176,17 @@ def test_list_unfinished_large_files_with_some_deprecated(self):
        self.raw_api.start_large_file(
            api_url, auth_token, 'bucket_0', 'file3', 'application/json', {}
        )
-        expected_stdout = '''
+        expected_stdout = """
    9999 file1 text/plain
    9998 file2 text/plain color=blue
    9997 file3 application/json
-    '''
+    """

        self._run_command(
-            ['list-unfinished-large-files', 'my-bucket'], expected_stdout,
+            ['list-unfinished-large-files', 'my-bucket'],
+            expected_stdout,
            'WARNING: `list-unfinished-large-files` command is deprecated. Use `file large unfinished list` instead.\n',
-            0
+            0,
        )


@@ -3073,17 +3224,20 @@ def tearDownClass(cls) -> None:
    def test_rm_wildcard(self):
        self._run_command(
            [
-                'rm', '--recursive', '--with-wildcard', '--no-progress',
-                *self.b2_uri_args('my-bucket', '*.csv')
+                'rm',
+                '--recursive',
+                '--with-wildcard',
+                '--no-progress',
+                *self.b2_uri_args('my-bucket', '*.csv'),
            ],
        )

-        expected_stdout = '''
+        expected_stdout = """
    a/test.tsv
    b/b2/test.tsv
    b/test.txt
    c/test.tsv
-    '''
+    """

        self._run_command(['ls', '--recursive', *self.b2_uri_args('my-bucket')], expected_stdout)

    def test_rm_versions(self):
@@ -3092,12 +3246,15 @@ def test_rm_versions(self):

        self._run_command(
            [
-                'rm', '--versions', '--recursive', '--with-wildcard',
-                *self.b2_uri_args('my-bucket', '*.csv')
+                'rm',
+                '--versions',
+                '--recursive',
+                '--with-wildcard',
+                *self.b2_uri_args('my-bucket', '*.csv'),
            ],
        )

-        expected_stdout = '''
+        expected_stdout = """
    a/test.tsv
    a/test.tsv
    b/b2/test.tsv
@@ -3106,7 +3263,7 @@ def test_rm_versions(self):
    b/test.txt
    c/test.tsv
    c/test.tsv
-    '''
+    """

        self._run_command(
            ['ls', '--versions', '--recursive', *self.b2_uri_args('my-bucket')], expected_stdout
@@ -3114,7 +3271,7 @@ def test_rm_versions(self):
    def test_rm_no_recursive(self):
        self._run_command(['rm', '--no-progress', *self.b2_uri_args('my-bucket', 'b/')])

-        expected_stdout = '''
+        expected_stdout = """
    a/test.csv
    a/test.tsv
    b/b/test.csv
@@ -3122,25 +3279,28 @@ def test_rm_no_recursive(self):
    b/b2/test.tsv
    c/test.csv
    c/test.tsv
-    '''
+    """

        self._run_command(['ls', '--recursive', *self.b2_uri_args('my-bucket')], expected_stdout)

    def test_rm_dry_run(self):
-        expected_stdout = '''
+        expected_stdout = """
    a/test.csv
    b/b/test.csv
    b/b1/test.csv
    c/test.csv
-    '''
+    """

        self._run_command(
            [
-                'rm', '--recursive', '--with-wildcard', '--dry-run',
-                *self.b2_uri_args('my-bucket', '*.csv')
+                'rm',
+                '--recursive',
+                '--with-wildcard',
+                '--dry-run',
+                *self.b2_uri_args('my-bucket', '*.csv'),
            ],
            expected_stdout,
        )

-        expected_stdout = '''
+        expected_stdout = """
    a/test.csv
    a/test.tsv
    b/b/test.csv
@@ -3149,18 +3309,21 @@ def test_rm_dry_run(self):
    b/test.txt
    c/test.csv
    c/test.tsv
-    '''
+    """

        self._run_command(['ls', '--recursive', *self.b2_uri_args('my-bucket')], expected_stdout)

    def test_rm_exact_filename(self):
        self._run_command(
            [
-                'rm', '--recursive', '--with-wildcard', '--no-progress',
-                *self.b2_uri_args('my-bucket', 'b/b/test.csv')
+                'rm',
+                '--recursive',
+                '--with-wildcard',
+                '--no-progress',
+                *self.b2_uri_args('my-bucket', 'b/b/test.csv'),
            ],
        )

-        expected_stdout = '''
+        expected_stdout = """
    a/test.csv
    a/test.tsv
    b/b1/test.csv
@@ -3168,7 +3331,7 @@ def test_rm_exact_filename(self):
    b/test.txt
    c/test.csv
    c/test.tsv
-    '''
+    """

        self._run_command(['ls', '--recursive', *self.b2_uri_args('my-bucket')], expected_stdout)

    def test_rm_no_name_removes_everything(self):
@@ -3185,8 +3348,13 @@ def test_rm_with_wildcard_without_recursive(self):
    def test_rm_queue_size_and_number_of_threads(self):
        self._run_command(
            [
-                'rm', '--recursive', '--threads', '2', '--queue-size', '4',
-                *self.b2_uri_args('my-bucket')
+                'rm',
+                '--recursive',
+                '--threads',
+                '2',
+                '--queue-size',
+                '4',
+                *self.b2_uri_args('my-bucket'),
            ]
        )
        self._run_command(['ls', '--recursive', *self.b2_uri_args('my-bucket')], '')
@@ -3198,19 +3366,19 @@ def test_rm_progress(self):
            expected_part_of_stdout=expected_in_stdout,
        )

-        expected_stdout = '''
+        expected_stdout = """
    a/test.tsv
    b/b2/test.tsv
    b/test.txt
    c/test.tsv
-    '''
+    """

        self._run_command(['ls', '--recursive', *self.b2_uri_args('my-bucket')], expected_stdout)

    def _run_problematic_removal(
        self,
        additional_parameters: Optional[List[str]] = None,
        expected_in_stdout: Optional[str] = None,
-        unexpected_in_stdout: Optional[str] = None
+        unexpected_in_stdout: Optional[str] = None,
    ):
        additional_parameters = additional_parameters or []

@@ -3250,18 +3418,18 @@ def mocked_delete_file_version(
    def test_rm_fail_fast(self):
        # Since we already have all the jobs submitted to another thread,
        # we can only rely on the log to tell when it stopped.
-        expected_in_stdout = '''
+        expected_in_stdout = """
    Deletion of file "b/b1/test.csv" (9996) failed: Conflict:
-     count: 3/4'''
+     count: 3/4"""
        unexpected_in_stdout = ' count: 5/5 '
        self._run_problematic_removal(['--fail-fast'], expected_in_stdout, unexpected_in_stdout)

    def test_rm_skipping_over_errors(self):
        self._run_problematic_removal()
-        expected_stdout = '''
+        expected_stdout = """
    b/b1/test.csv
-    '''
+    """

        self._run_command(['ls', '--recursive', *self.b2_uri_args('my-bucket')], expected_stdout)

    @pytest.mark.apiver(from_ver=4)
    def test_rm_b2id(self):
        file_version = self.bucket.upload(UploadSourceBytes(b''), 'new-file.txt')

        # Before deleting
-        expected_stdout = '''
+        expected_stdout = """
    a/test.csv
    a/test.tsv
    b/b/test.csv
@@ -3280,14 +3448,14 @@ def test_rm_b2id(self):
    c/test.csv
    c/test.tsv
    new-file.txt
-    '''
+    """

        self._run_command(['ls', '--recursive', 'b2://my-bucket'], expected_stdout)

        # Delete file
        self._run_command(['rm', '--no-progress', f'b2id://{file_version.id_}'], '', '', 0)

        # After deleting
-        expected_stdout = '''
+        expected_stdout = """
    a/test.csv
    a/test.tsv
    b/b/test.csv
@@ -3296,7 +3464,7 @@ def test_rm_b2id(self):
    b/test.txt
    c/test.csv
    c/test.tsv
-    '''
+    """

        self._run_command(['ls', '--recursive', 'b2://my-bucket'], expected_stdout)

    def rm_filters_helper(self, rm_args: List[str], expected_ls_stdout: str):
@@ -3324,7 +3492,7 @@ def rm_filters_helper(self, rm_args: List[str], expected_ls_stdout: str):
        )

    def test_rm_filters_include(self):
-        expected_ls_stdout = '''
+        expected_ls_stdout = """
    a/test.csv
    a/test.tsv
    b/b/test.csv
@@ -3332,49 +3500,49 @@ def test_rm_filters_include(self):
    c/test.tsv
    test.tsv
    test.txt
-    '''
+    """

        self.rm_filters_helper(['--include', '*.csv'], expected_ls_stdout)

    def test_rm_filters_exclude(self):
-        expected_ls_stdout = '''
+        expected_ls_stdout = """
    a/test.csv
    a/test.tsv
    b/b/test.csv
    c/test.csv
    c/test.tsv
    test.csv
-    '''
+    """

        self.rm_filters_helper(['--exclude', '*.csv'], expected_ls_stdout)

    def test_rm_filters_include_recursive(self):
-        expected_ls_stdout = '''
+        expected_ls_stdout = """
    a/test.tsv
    c/test.tsv
    test.tsv
    test.txt
-    '''
+    """

        self.rm_filters_helper(['--recursive', '--include', '*.csv'], expected_ls_stdout)

    def test_rm_filters_exclude_recursive(self):
-        expected_ls_stdout = '''
+        expected_ls_stdout = """
    a/test.csv
    b/b/test.csv
    c/test.csv
    test.csv
-    '''
+    """

        self.rm_filters_helper(['--recursive', '--exclude', '*.csv'], expected_ls_stdout)

    def test_rm_filters_mixed(self):
-        expected_ls_stdout = '''
+        expected_ls_stdout = """
    a/test.csv
    a/test.tsv
    c/test.tsv
    test.tsv
    test.txt
-    '''
+    """

        self.rm_filters_helper(
            ['--recursive', '--exclude', '*', '--include', '*.csv', '--exclude', 'a/*'],
-            expected_ls_stdout
+            expected_ls_stdout,
        )
diff --git a/test/unit/test_copy.py b/test/unit/test_copy.py
index 336bf53c6..5d75cb33a 100644
--- a/test/unit/test_copy.py
+++ b/test/unit/test_copy.py
@@ -66,12 +66,14 @@ def test_determine_source_metadata(self):
        assert len(mock_api.method_calls) == 0
source_sse_c = EncryptionSetting( - EncryptionMode.SSE_C, EncryptionAlgorithm.AES256, - EncryptionKey(b'some_key', UNKNOWN_KEY_ID) + EncryptionMode.SSE_C, + EncryptionAlgorithm.AES256, + EncryptionKey(b'some_key', UNKNOWN_KEY_ID), ) destination_sse_c = EncryptionSetting( - EncryptionMode.SSE_C, EncryptionAlgorithm.AES256, - EncryptionKey(b'some_other_key', 'key_id') + EncryptionMode.SSE_C, + EncryptionAlgorithm.AES256, + EncryptionKey(b'some_other_key', 'key_id'), ) result = copy_file_command._determine_source_metadata( @@ -86,9 +88,10 @@ def test_determine_source_metadata(self): assert len(mock_api.method_calls) == 0 with self.assertRaises( - ValueError, 'Attempting to copy file with metadata while either source or ' + ValueError, + 'Attempting to copy file with metadata while either source or ' 'destination uses SSE-C. Use --fetch-metadata to fetch source ' - 'file metadata before copying.' + 'file metadata before copying.', ): copy_file_command._determine_source_metadata( B2FileIdURI('id'), diff --git a/test/unit/test_represent_file_metadata.py b/test/unit/test_represent_file_metadata.py index e92f19225..642efa9dc 100644 --- a/test/unit/test_represent_file_metadata.py +++ b/test/unit/test_represent_file_metadata.py @@ -51,7 +51,8 @@ def setUp(self): 'listBuckets', 'listFiles', 'readFiles', - ], 'restricted' + ], + 'restricted', ) self.restricted_key_id, self.restricted_key = new_key.id_, new_key.application_key @@ -87,9 +88,10 @@ def assertLegalHoldRepr(self, file_id: str, api: B2Api, expected_repr: str): def assertEncryptionRepr(self, file_id: str, expected_repr: str): file_version = self.master_b2_api.get_file_info(file_id) - assert DownloadCommand._represent_encryption( - file_version.server_side_encryption - ) == expected_repr + assert ( + DownloadCommand._represent_encryption(file_version.server_side_encryption) + == expected_repr + ) def test_file_retention(self): file = self.lock_disabled_bucket.upload_bytes(b'insignificant', 'file') @@ -104,8 +106,9 @@ def test_file_retention(self): file.id_, file.file_name, FileRetentionSetting(RetentionMode.GOVERNANCE, 1500) ) self.assertRetentionRepr( - file.id_, self.master_b2_api, - 'mode=governance, retainUntil=1970-01-01 00:00:01.500000+00:00' + file.id_, + self.master_b2_api, + 'mode=governance, retainUntil=1970-01-01 00:00:01.500000+00:00', ) self.assertRetentionRepr(file.id_, self.restricted_b2_api, '') @@ -151,7 +154,7 @@ def test_encryption(self): EncryptionMode.SSE_C, algorithm=EncryptionAlgorithm.AES256, key=EncryptionKey(b'', key_id=None), - ) + ), ) self.assertEncryptionRepr(file.id_, 'mode=SSE-C, algorithm=AES256') @@ -162,6 +165,6 @@ def test_encryption(self): EncryptionMode.SSE_C, algorithm=EncryptionAlgorithm.AES256, key=EncryptionKey(b'', key_id='some_id'), - ) + ), ) self.assertEncryptionRepr(file.id_, 'mode=SSE-C, algorithm=AES256, key_id=some_id')