diff --git a/README.md b/README.md index 274b8a480..e3c3ea594 100644 --- a/README.md +++ b/README.md @@ -69,7 +69,7 @@ b2 cancel-all-unfinished-large-files [-h] bucketName b2 cancel-large-file [-h] fileId b2 clear-account [-h] b2 copy-file-by-id [-h] [--fetch-metadata] [--content-type CONTENTTYPE] [--range RANGE] [--info INFO | --no-info] [--destination-server-side-encryption {SSE-B2,SSE-C}] [--destination-server-side-encryption-algorithm {AES256}] [--source-server-side-encryption {SSE-C}] [--source-server-side-encryption-algorithm {AES256}] [--file-retention-mode {compliance,governance}] [--retain-until TIMESTAMP] [--legal-hold {on,off}] sourceFileId destinationBucketName b2FileName -b2 create-bucket [-h] [--bucket-info BUCKETINFO] [--cors-rules CORSRULES] [--file-lock-enabled] [--replication REPLICATION] [--default-server-side-encryption {SSE-B2,none}] [--default-server-side-encryption-algorithm {AES256}] [--lifecycle-rule LIFECYCLERULES | --lifecycleRules LIFECYCLERULES] bucketName {allPublic,allPrivate} +b2 create-bucket [-h] [--bucket-info BUCKETINFO] [--cors-rules CORSRULES] [--file-lock-enabled] [--replication REPLICATION] [--default-server-side-encryption {SSE-B2,none}] [--default-server-side-encryption-algorithm {AES256}] [--lifecycle-rule LIFECYCLERULES | --lifecycle-rules LIFECYCLERULES] bucketName {allPublic,allPrivate} b2 create-key [-h] [--bucket BUCKET] [--name-prefix NAMEPREFIX] [--duration DURATION] [--all-capabilities] keyName [capabilities] b2 delete-bucket [-h] bucketName b2 delete-file-version [-h] [--bypass-governance] [fileName] fileId @@ -90,7 +90,7 @@ b2 ls [-h] [--long] [--json] [--replication] [--versions] [-r] [--with-wildcard] b2 rm [-h] [--dry-run] [--queue-size QUEUESIZE] [--no-progress] [--fail-fast] [--threads THREADS] [--versions] [-r] [--with-wildcard] bucketName [folderName] b2 get-url [-h] B2_URI b2 sync [-h] [--no-progress] [--dry-run] [--allow-empty-source] [--exclude-all-symlinks] [--sync-threads SYNCTHREADS] 
[--download-threads DOWNLOADTHREADS] [--upload-threads UPLOADTHREADS] [--compare-versions {none,modTime,size}] [--compare-threshold MILLIS] [--exclude-regex REGEX] [--include-regex REGEX] [--exclude-dir-regex REGEX] [--exclude-if-modified-after TIMESTAMP] [--threads THREADS] [--destination-server-side-encryption {SSE-B2,SSE-C}] [--destination-server-side-encryption-algorithm {AES256}] [--source-server-side-encryption {SSE-C}] [--source-server-side-encryption-algorithm {AES256}] [--write-buffer-size BYTES] [--skip-hash-verification] [--max-download-streams-per-file MAX_DOWNLOAD_STREAMS_PER_FILE] [--incremental-mode] [--skip-newer | --replace-newer] [--delete | --keep-days DAYS] source destination -b2 update-bucket [-h] [--bucket-info BUCKETINFO] [--cors-rules CORSRULES] [--default-retention-mode {compliance,governance,none}] [--default-retention-period period] [--replication REPLICATION] [--file-lock-enabled] [--default-server-side-encryption {SSE-B2,none}] [--default-server-side-encryption-algorithm {AES256}] [--lifecycle-rule LIFECYCLERULES | --lifecycleRules LIFECYCLERULES] bucketName [{allPublic,allPrivate}] +b2 update-bucket [-h] [--bucket-info BUCKETINFO] [--cors-rules CORSRULES] [--default-retention-mode {compliance,governance,none}] [--default-retention-period period] [--replication REPLICATION] [--file-lock-enabled] [--default-server-side-encryption {SSE-B2,none}] [--default-server-side-encryption-algorithm {AES256}] [--lifecycle-rule LIFECYCLERULES | --lifecycle-rules LIFECYCLERULES] bucketName [{allPublic,allPrivate}] b2 upload-file [-h] [--content-type CONTENTTYPE] [--sha1 SHA1] [--cache-control CACHE_CONTROL] [--info INFO] [--custom-upload-timestamp CUSTOM_UPLOAD_TIMESTAMP] [--min-part-size MINPARTSIZE] [--threads THREADS] [--no-progress] [--destination-server-side-encryption {SSE-B2,SSE-C}] [--destination-server-side-encryption-algorithm {AES256}] [--legal-hold {on,off}] [--file-retention-mode {compliance,governance}] [--retain-until TIMESTAMP] 
[--incremental-mode] bucketName localFilePath b2FileName b2 upload-unbound-stream [-h] [--part-size PARTSIZE] [--unused-buffer-timeout-seconds UNUSEDBUFFERTIMEOUTSECONDS] [--content-type CONTENTTYPE] [--sha1 SHA1] [--cache-control CACHE_CONTROL] [--info INFO] [--custom-upload-timestamp CUSTOM_UPLOAD_TIMESTAMP] [--min-part-size MINPARTSIZE] [--threads THREADS] [--no-progress] [--destination-server-side-encryption {SSE-B2,SSE-C}] [--destination-server-side-encryption-algorithm {AES256}] [--legal-hold {on,off}] [--file-retention-mode {compliance,governance}] [--retain-until TIMESTAMP] bucketName localFilePath b2FileName b2 update-file-legal-hold [-h] [fileName] fileId {on,off} diff --git a/b2/_internal/_cli/autocomplete_cache.py b/b2/_internal/_cli/autocomplete_cache.py index 741b0e8b5..1b325a913 100644 --- a/b2/_internal/_cli/autocomplete_cache.py +++ b/b2/_internal/_cli/autocomplete_cache.py @@ -20,6 +20,7 @@ import argcomplete import platformdirs +from b2._internal.arg_parser import DeprecatedActionMarker from b2._internal.version import VERSION @@ -119,7 +120,7 @@ def _clean_parser(self, parser: argparse.ArgumentParser) -> None: parser.register('type', None, identity) def _get_deprecated_actions(actions): - return [action for action in actions if action.__class__.__name__ == 'DeprecatedAction'] + return [action for action in actions if isinstance(action, DeprecatedActionMarker)] for action in _get_deprecated_actions(parser._actions): parser._actions.remove(action) diff --git a/b2/_internal/arg_parser.py b/b2/_internal/arg_parser.py index e9223b703..f869af22f 100644 --- a/b2/_internal/arg_parser.py +++ b/b2/_internal/arg_parser.py @@ -171,17 +171,38 @@ def deprecated_action_call(self, parser, namespace, values, option_string=None, return deprecated_action_call +_kebab_to_snake_pattern = re.compile(r'-') +_camel_to_kebab_pattern = re.compile(r'(?<=[a-z])([A-Z])') +_kebab_to_camel_pattern = re.compile(r'-(\w)') + + def _camel_to_kebab(s: str): - return 
re.sub(r'(?<=[a-z])([A-Z])', r'-\1', s).lower() + return _camel_to_kebab_pattern.sub(r'-\1', s).lower() + + +def _kebab_to_camel(s: str): + return "--" + _kebab_to_camel_pattern.sub(lambda m: m.group(1).upper(), s[2:]) + + +def _kebab_to_snake(s: str): + return _kebab_to_snake_pattern.sub('_', s) + + +class DeprecatedActionMarker: + pass def add_normalized_argument(parser, param_name, *args, **kwargs): - kebab_param_name = _camel_to_kebab(param_name) + param_name_kebab = _camel_to_kebab(param_name) + param_name_camel = _kebab_to_camel(param_name_kebab) + dest_name_snake = _kebab_to_snake(param_name_kebab)[2:] kwargs_kebab = dict(kwargs) - kwargs['help'] = argparse.SUPPRESS + kwargs_camel = kwargs + kwargs_camel['help'] = argparse.SUPPRESS if 'dest' not in kwargs_kebab: - kwargs_kebab['dest'] = param_name[2:] + kwargs_kebab['dest'] = dest_name_snake + kwargs_camel['dest'] = dest_name_snake if 'action' in kwargs: if isinstance(kwargs['action'], str): @@ -191,11 +212,12 @@ def add_normalized_argument(parser, param_name, *args, **kwargs): else: action = argparse._StoreAction - kwargs['action'] = type( - 'DeprecatedAction', (action,), {'__call__': make_deprecated_action_call(action)} + kwargs_camel['action'] = type( + 'DeprecatedAction', (action, DeprecatedActionMarker), + {'__call__': make_deprecated_action_call(action)} ) - parser.add_argument(f'{kebab_param_name}', *args, **kwargs_kebab) + parser.add_argument(f'{param_name_kebab}', *args, **kwargs_kebab) - if SUPPORT_CAMEL_CASE_ARGUMENTS: - parser.add_argument(f'{param_name}', *args, **kwargs) + if SUPPORT_CAMEL_CASE_ARGUMENTS and param_name_kebab != param_name_camel: + parser.add_argument(f'{param_name_camel}', *args, **kwargs_camel) diff --git a/b2/_internal/console_tool.py b/b2/_internal/console_tool.py index 3cf3e4130..fdbaa194b 100644 --- a/b2/_internal/console_tool.py +++ b/b2/_internal/console_tool.py @@ -295,7 +295,7 @@ class DefaultSseMixin(Described): you can enable SSE-B2 encryption as a default setting 
for the bucket. In order to do that pass ``--default-server-side-encryption=SSE-B2``. The default algorithm is set to AES256 which can by changed - with ``--default-server_side_encryption_algorithm`` parameter. + with ``--default-server-side-encryption-algorithm`` parameter. All uploads to that bucket, from the time default encryption is enabled onward, will then be encrypted with SSE-B2 by default. @@ -312,23 +312,26 @@ class DefaultSseMixin(Described): @classmethod def _setup_parser(cls, parser): add_normalized_argument( - parser, '--defaultServerSideEncryption', default=None, choices=('SSE-B2', 'none') + parser, '--default-server-side-encryption', default=None, choices=('SSE-B2', 'none') ) add_normalized_argument( - parser, '--defaultServerSideEncryptionAlgorithm', default='AES256', choices=('AES256',) + parser, + '--default-server-side-encryption-algorithm', + default='AES256', + choices=('AES256',) ) super()._setup_parser(parser) # noqa @classmethod def _get_default_sse_setting(cls, args): - mode = apply_or_none(EncryptionMode, args.defaultServerSideEncryption) + mode = apply_or_none(EncryptionMode, args.default_server_side_encryption) if mode is not None: if mode == EncryptionMode.NONE: - args.defaultServerSideEncryptionAlgorithm = None + args.default_server_side_encryption_algorithm = None algorithm = apply_or_none( - EncryptionAlgorithm, args.defaultServerSideEncryptionAlgorithm + EncryptionAlgorithm, args.default_server_side_encryption_algorithm ) return EncryptionSetting(mode=mode, algorithm=algorithm) @@ -351,11 +354,14 @@ class DestinationSseMixin(Described): @classmethod def _setup_parser(cls, parser): add_normalized_argument( - parser, '--destinationServerSideEncryption', default=None, choices=('SSE-B2', 'SSE-C') + parser, + '--destination-server-side-encryption', + default=None, + choices=('SSE-B2', 'SSE-C') ) add_normalized_argument( parser, - '--destinationServerSideEncryptionAlgorithm', + '--destination-server-side-encryption-algorithm', 
default='AES256', choices=('AES256',) ) @@ -363,10 +369,10 @@ def _setup_parser(cls, parser): super()._setup_parser(parser) # noqa def _get_destination_sse_setting(self, args): - mode = apply_or_none(EncryptionMode, args.destinationServerSideEncryption) + mode = apply_or_none(EncryptionMode, args.destination_server_side_encryption) if mode is not None: algorithm = apply_or_none( - EncryptionAlgorithm, args.destinationServerSideEncryptionAlgorithm + EncryptionAlgorithm, args.destination_server_side_encryption_algorithm ) if mode == EncryptionMode.SSE_B2: key = None @@ -403,14 +409,14 @@ class FileRetentionSettingMixin(Described): def _setup_parser(cls, parser): add_normalized_argument( parser, - '--fileRetentionMode', + '--file-retention-mode', default=None, choices=(RetentionMode.COMPLIANCE.value, RetentionMode.GOVERNANCE.value) ) add_normalized_argument( parser, - '--retainUntil', + '--retain-until', type=parse_millis_from_float_timestamp, default=None, metavar='TIMESTAMP' @@ -419,42 +425,47 @@ def _setup_parser(cls, parser): @classmethod def _get_file_retention_setting(cls, args): - if (args.fileRetentionMode is None) != (args.retainUntil is None): + if (args.file_retention_mode is None) != (args.retain_until is None): raise ValueError( - 'provide either both --retainUntil and --fileRetentionMode or none of them' + 'provide either both --retain-until and --file-retention-mode or none of them' ) - file_retention_mode = apply_or_none(RetentionMode, args.fileRetentionMode) + file_retention_mode = apply_or_none(RetentionMode, args.file_retention_mode) if file_retention_mode is None: return None - return FileRetentionSetting(file_retention_mode, args.retainUntil) + return FileRetentionSetting(file_retention_mode, args.retain_until) class HeaderFlagsMixin(Described): @classmethod def _setup_parser(cls, parser: argparse.ArgumentParser) -> None: - parser.add_argument( + add_normalized_argument( + parser, '--cache-control', help= "optional Cache-Control header, value 
based on RFC 2616 section 14.9, example: 'public, max-age=86400')" ) - parser.add_argument( + add_normalized_argument( + parser, '--content-disposition', help= "optional Content-Disposition header, value based on RFC 2616 section 19.5.1, example: 'attachment; filename=\"fname.ext\"'" ) - parser.add_argument( + add_normalized_argument( + parser, '--content-encoding', help= "optional Content-Encoding header, value based on RFC 2616 section 14.11, example: 'gzip'" ) - parser.add_argument( + add_normalized_argument( + parser, '--content-language', help= "optional Content-Language header, value based on RFC 2616 section 14.12, example: 'mi, en'" ) - parser.add_argument( + add_normalized_argument( + parser, '--expires', help= "optional Expires header, value based on RFC 2616 section 14.21, example: 'Thu, 01 Dec 2050 16:00:00 GMT'" @@ -503,13 +514,13 @@ class LegalHoldMixin(Described): @classmethod def _setup_parser(cls, parser): add_normalized_argument( - parser, '--legalHold', default=None, choices=(LegalHold.ON.value, LegalHold.OFF.value) + parser, '--legal-hold', default=None, choices=(LegalHold.ON.value, LegalHold.OFF.value) ) super()._setup_parser(parser) # noqa @classmethod def _get_legal_hold_setting(cls, args) -> LegalHold: - return apply_or_none(LegalHold.from_string_or_none, args.legalHold) + return apply_or_none(LegalHold.from_string_or_none, args.legal_hold) class SourceSseMixin(Described): @@ -517,7 +528,7 @@ class SourceSseMixin(Described): To access SSE-C encrypted files, please set ``--source-server-side-encryption=SSE-C``. The default algorithm is set to AES256 which can by changed - with ``--source-server_side_encryption_algorithm`` parameter. + with ``--source-server-side-encryption-algorithm`` parameter. Using SSE-C requires providing ``{B2_SOURCE_SSE_C_KEY_B64_ENV_VAR}`` environment variable, containing the base64 encoded encryption key. 
""" @@ -525,19 +536,24 @@ class SourceSseMixin(Described): @classmethod def _setup_parser(cls, parser): add_normalized_argument( - parser, '--sourceServerSideEncryption', default=None, choices=('SSE-C',) + parser, '--source-server-side-encryption', default=None, choices=('SSE-C',) ) add_normalized_argument( - parser, '--sourceServerSideEncryptionAlgorithm', default='AES256', choices=('AES256',) + parser, + '--source-server-side-encryption-algorithm', + default='AES256', + choices=('AES256',) ) super()._setup_parser(parser) # noqa @classmethod def _get_source_sse_setting(cls, args): - mode = apply_or_none(EncryptionMode, args.sourceServerSideEncryption) + mode = apply_or_none(EncryptionMode, args.source_server_side_encryption) if mode is not None: - algorithm = apply_or_none(EncryptionAlgorithm, args.sourceServerSideEncryptionAlgorithm) + algorithm = apply_or_none( + EncryptionAlgorithm, args.source_server_side_encryption_algorithm + ) key = None if mode == EncryptionMode.SSE_C: encryption_key_b64 = os.environ.get(B2_SOURCE_SSE_C_KEY_B64_ENV_VAR) @@ -565,7 +581,7 @@ class WriteBufferSizeMixin(Described): @classmethod def _setup_parser(cls, parser): - parser.add_argument('--write-buffer-size', type=int, metavar='BYTES') + add_normalized_argument(parser, '--write-buffer-size', type=int, metavar='BYTES') super()._setup_parser(parser) # noqa @@ -576,7 +592,9 @@ class SkipHashVerificationMixin(Described): @classmethod def _setup_parser(cls, parser): - parser.add_argument('--skip-hash-verification', action='store_true', default=False) + add_normalized_argument( + parser, '--skip-hash-verification', action='store_true', default=False + ) super()._setup_parser(parser) # noqa @@ -587,7 +605,7 @@ class MaxDownloadStreamsMixin(Described): @classmethod def _setup_parser(cls, parser): - parser.add_argument('--max-download-streams-per-file', type=int) + add_normalized_argument(parser, '--max-download-streams-per-file', type=int) super()._setup_parser(parser) # noqa @@ -666,18 
+684,18 @@ def get_b2_uri_from_arg(self, args: argparse.Namespace) -> B2URI: class UploadModeMixin(Described): """ - Use --incrementalMode to allow for incremental file uploads to safe bandwidth. This will only affect files, which + Use --incremental-mode to allow for incremental file uploads to save bandwidth. This will only affect files, which have been appended to since last upload. """ @classmethod def _setup_parser(cls, parser): - add_normalized_argument(parser, '--incrementalMode', action='store_true') + add_normalized_argument(parser, '--incremental-mode', action='store_true') super()._setup_parser(parser) # noqa @staticmethod def _get_upload_mode_from_args(args): - if args.incrementalMode: + if args.incremental_mode: return UploadMode.INCREMENTAL return UploadMode.FULL @@ -692,7 +710,17 @@ class ProgressMixin(Described): @classmethod def _setup_parser(cls, parser): add_normalized_argument( - parser, '--noProgress', action='store_true', help="progress will not be reported" + parser, '--no-progress', action='store_true', help="progress will not be reported" ) super()._setup_parser(parser) # noqa class LifecycleRulesMixin(Described): """ - Use `--lifecycleRule` to set lifecycle rule for the bucket. + Use `--lifecycle-rule` to set lifecycle rule for the bucket. Multiple rules can be specified by repeating the option. - `--lifecycleRules` option is deprecated and cannot be used together with --lifecycleRule. + `--lifecycle-rules` option is deprecated and cannot be used together with --lifecycle-rule. """ @classmethod @@ -710,15 +728,16 @@ def _setup_parser(cls, parser): lifecycle_group = parser.add_mutually_exclusive_group() add_normalized_argument( lifecycle_group, - '--lifecycleRule', + '--lifecycle-rule', action='append', default=None, type=functools.partial(validated_loads, expected_type=LifecycleRule), - dest='lifecycleRules', + dest='lifecycle_rules', help="Lifecycle rule in JSON format. 
Can be supplied multiple times.", ) - lifecycle_group.add_argument( - '--lifecycleRules', + add_normalized_argument( + lifecycle_group, + '--lifecycle-rules', type=functools.partial(validated_loads, expected_type=List[LifecycleRule]), help= "(deprecated; use --lifecycle-rule instead) List of lifecycle rules in JSON format.", @@ -863,11 +882,11 @@ def create_parser( if cls.subcommands_registry: if not parents: common_parser = B2ArgumentParser(add_help=False, add_help_all=False) - common_parser.add_argument( - '--debugLogs', action='store_true', help=argparse.SUPPRESS + add_normalized_argument( + common_parser, '--debug-logs', action='store_true', help=argparse.SUPPRESS ) common_parser.add_argument('--verbose', action='store_true', help=argparse.SUPPRESS) - common_parser.add_argument('--logConfig', help=argparse.SUPPRESS) + add_normalized_argument(common_parser, '--log-config', help=argparse.SUPPRESS) common_parser.add_argument('--profile', default=None, help=argparse.SUPPRESS) common_parser.add_argument( '-q', '--quiet', action='store_true', default=False, help=argparse.SUPPRESS @@ -1291,15 +1310,17 @@ class CopyFileById( @classmethod def _setup_parser(cls, parser): - add_normalized_argument(parser, '--fetchMetadata', action='store_true', default=False) - add_normalized_argument(parser, '--metadataDirective', default=None, help=argparse.SUPPRESS) - add_normalized_argument(parser, '--contentType') + add_normalized_argument(parser, '--fetch-metadata', action='store_true', default=False) + add_normalized_argument( + parser, '--metadata-directive', default=None, help=argparse.SUPPRESS + ) + add_normalized_argument(parser, '--content-type') parser.add_argument('--range', type=parse_range) info_group = parser.add_mutually_exclusive_group() - info_group.add_argument('--info', action='append') - add_normalized_argument(info_group, '--noInfo', action='store_true', default=False) + add_normalized_argument(info_group, '--info', action='append') + 
add_normalized_argument(info_group, '--no-info', action='store_true', default=False) parser.add_argument('sourceFileId') parser.add_argument('destinationBucketName') @@ -1311,14 +1332,14 @@ def _run(self, args): file_infos = None if args.info: file_infos = self._parse_file_infos(args.info) - elif args.noInfo: + elif args.no_info: file_infos = {} file_infos = self._file_info_with_header_args(args, file_infos) - if args.metadataDirective is not None: + if args.metadata_directive is not None: self._print_stderr( - '--metadataDirective is deprecated, the value of this argument is determined based on the existence of ' - '--contentType and --info.' + '--metadata-directive is deprecated, the value of this argument is determined based on the existence of ' + '--content-type and --info.' ) bucket = self.api.get_bucket_by_name(args.destinationBucketName) @@ -1337,15 +1358,15 @@ def _run(self, args): source_file_id=args.sourceFileId, source_encryption=source_encryption_setting, destination_encryption=destination_encryption_setting, - target_content_type=args.contentType, + target_content_type=args.content_type, target_file_info=file_infos, - fetch_if_necessary=args.fetchMetadata, + fetch_if_necessary=args.fetch_metadata, ) file_version = bucket.copy( args.sourceFileId, args.b2FileName, **range_args, - content_type=args.contentType, + content_type=args.content_type, file_info=file_infos, destination_encryption=destination_encryption_setting, source_encryption=source_encryption_setting, @@ -1381,7 +1402,7 @@ def _determine_source_metadata( if not fetch_if_necessary: raise ValueError( 'Attempting to copy file with metadata while either source or destination uses ' - 'SSE-C. Use --fetchMetadata to fetch source file metadata before copying.' + 'SSE-C. Use --fetch-metadata to fetch source file metadata before copying.' 
) source_file_version = self.api.get_file_info(source_file_id) return source_file_version.file_info, source_file_version.content_type @@ -1407,17 +1428,17 @@ class CreateBucket(DefaultSseMixin, LifecycleRulesMixin, Command): @classmethod def _setup_parser(cls, parser): - add_normalized_argument(parser, '--bucketInfo') + add_normalized_argument(parser, '--bucket-info', type=validated_loads) add_normalized_argument( parser, - '--corsRules', + '--cors-rules', type=validated_loads, help= "If given, the bucket will have a 'custom' CORS configuration. Accepts a JSON string." ) add_normalized_argument( parser, - '--fileLockEnabled', + '--file-lock-enabled', action='store_true', help= "If given, the bucket will have the file lock mechanism enabled. This parameter cannot be changed after bucket creation." @@ -1433,11 +1454,11 @@ def _run(self, args): bucket = self.api.create_bucket( args.bucketName, args.bucketType, - bucket_info=args.bucketInfo, - cors_rules=args.corsRules, - lifecycle_rules=args.lifecycleRules, + bucket_info=args.bucket_info, + cors_rules=args.cors_rules, + lifecycle_rules=args.lifecycle_rules, default_server_side_encryption=encryption_setting, - is_file_lock_enabled=args.fileLockEnabled, + is_file_lock_enabled=args.file_lock_enabled, replication=args.replication and ReplicationConfiguration.from_dict(args.replication), ) self._print(bucket.id_) @@ -1473,13 +1494,13 @@ class CreateKey(Command): @classmethod def _setup_parser(cls, parser): parser.add_argument('--bucket') - add_normalized_argument(parser, '--namePrefix') + add_normalized_argument(parser, '--name-prefix') parser.add_argument('--duration', type=int) parser.add_argument('keyName') capabilities = parser.add_mutually_exclusive_group(required=True) capabilities.add_argument('capabilities', type=parse_comma_separated_list, nargs='?') - add_normalized_argument(capabilities, '--allCapabilities', action='store_true') + add_normalized_argument(capabilities, '--all-capabilities', action='store_true') 
super()._setup_parser(parser) def _run(self, args): @@ -1489,7 +1510,7 @@ def _run(self, args): else: bucket_id_or_none = self.api.get_bucket_by_name(args.bucket).id_ - if args.allCapabilities: + if args.all_capabilities: args.capabilities = ALL_CAPABILITIES application_key = self.api.create_key( @@ -1497,7 +1518,7 @@ def _run(self, args): key_name=args.keyName, valid_duration_seconds=args.duration, bucket_id=bucket_id_or_none, - name_prefix=args.namePrefix + name_prefix=args.name_prefix ) self._print(f'{application_key.id_} {application_key.application_key}') @@ -1546,11 +1567,11 @@ class DeleteFileVersion(FileIdAndOptionalFileNameMixin, Command): @classmethod def _setup_parser(cls, parser): super()._setup_parser(parser) - add_normalized_argument(parser, '--bypassGovernance', action='store_true', default=False) + add_normalized_argument(parser, '--bypass-governance', action='store_true', default=False) def _run(self, args): file_name = self._get_file_name_from_args(args) - file_info = self.api.delete_file_version(args.fileId, file_name, args.bypassGovernance) + file_info = self.api.delete_file_version(args.fileId, file_name, args.bypass_governance) self._print_json(file_info) return 0 @@ -1735,7 +1756,7 @@ class DownloadFileBase( def _run(self, args): progress_listener = self.make_progress_listener( - args.localFileName, args.noProgress or args.quiet + args.localFileName, args.no_progress or args.quiet ) encryption_setting = self._get_source_sse_setting(args) self._set_threads_from_args(args) @@ -1804,7 +1825,7 @@ class Cat(B2URIFileArgMixin, DownloadCommand): def _run(self, args): target_filename = '-' progress_listener = self.make_progress_listener( - target_filename, args.noProgress or args.quiet + target_filename, args.no_progress or args.quiet ) encryption_setting = self._get_source_sse_setting(args) file_request = self.api.download_file_by_uri( @@ -1854,7 +1875,7 @@ class GetBucket(Command): @classmethod def _setup_parser(cls, parser): - 
add_normalized_argument(parser, '--showSize', action='store_true') + add_normalized_argument(parser, '--show-size', action='store_true') parser.add_argument('bucketName').completer = bucket_name_completer super()._setup_parser(parser) @@ -1862,7 +1883,7 @@ def _run(self, args): # This always wants up-to-date info, so it does not use # the bucket cache. for b in self.api.list_buckets(args.bucketName): - if not args.showSize: + if not args.show_size: self._print_json(b) return 0 else: @@ -2192,7 +2213,7 @@ class AbstractLsCommand(Command, metaclass=ABCMeta): def _setup_parser(cls, parser): parser.add_argument('--versions', action='store_true') parser.add_argument('-r', '--recursive', action='store_true') - add_normalized_argument(parser, '--withWildcard', action='store_true') + add_normalized_argument(parser, '--with-wildcard', action='store_true') parser.add_argument( '--include', dest='filters', action='append', type=Filter.include, default=[] ) @@ -2221,7 +2242,7 @@ def _get_ls_generator(self, args): b2_uri, latest_only=not args.versions, recursive=args.recursive, - with_wildcard=args.withWildcard, + with_wildcard=args.with_wildcard, filters=args.filters, ) @@ -2331,21 +2352,21 @@ class Ls(B2IDOrB2URIMixin, BaseLs): .. code-block:: - {NAME} ls --recursive --withWildcard "b2://bucketName/*.[ct]sv" + {NAME} ls --recursive --with-wildcard "b2://bucketName/*.[ct]sv" List all info.txt files from buckets bX, where X is any character: .. code-block:: - {NAME} ls --recursive --withWildcard "b2://bucketName/b?/info.txt" + {NAME} ls --recursive --with-wildcard "b2://bucketName/b?/info.txt" List all pdf files from buckets b0 to b9 (including sub-directories): .. 
code-block:: - {NAME} ls --recursive --withWildcard "b2://bucketName/b[0-9]/*.pdf" + {NAME} ls --recursive --with-wildcard "b2://bucketName/b[0-9]/*.pdf" Requires capability: @@ -2413,7 +2434,7 @@ def __init__( self.messages_queue = messages_queue self.reporter = reporter self.threads = threads - removal_queue_size = self.args.queueSize or (2 * self.threads) + removal_queue_size = self.args.queue_size or (2 * self.threads) self.semaphore = threading.BoundedSemaphore(value=removal_queue_size) self.fail_fast_event = threading.Event() self.mapping_lock = threading.Lock() @@ -2466,7 +2487,7 @@ def _removal_done(self, future: Future) -> None: # We wanted to remove this file anyway. self.reporter.update_count(1) except B2Error as error: - if self.args.failFast: + if self.args.fail_fast: # This is set before releasing the semaphore. # It means that when the semaphore is released, # we'll already have information about requirement to fail. @@ -2479,27 +2500,27 @@ def _removal_done(self, future: Future) -> None: @classmethod def _setup_parser(cls, parser): - add_normalized_argument(parser, '--dryRun', action='store_true') + add_normalized_argument(parser, '--dry-run', action='store_true') add_normalized_argument(parser, - '--queueSize', + '--queue-size', type=int, default=None, help='max elements fetched at once for removal, ' \ 'if left unset defaults to twice the number of threads.', ) - add_normalized_argument(parser, '--noProgress', action='store_true') - add_normalized_argument(parser, '--failFast', action='store_true') + add_normalized_argument(parser, '--no-progress', action='store_true') + add_normalized_argument(parser, '--fail-fast', action='store_true') super()._setup_parser(parser) def _run(self, args): - if args.dryRun: + if args.dry_run: self._print_files(args) return 0 failed_on_any_file = False messages_queue = queue.Queue() threads = self._get_threads_from_args(args) - with self.PROGRESS_REPORT_CLASS(self.stdout, args.noProgress or args.quiet) as reporter: 
+ with self.PROGRESS_REPORT_CLASS(self.stdout, args.no_progress or args.quiet) as reporter: submit_thread = self.SubmitThread(self, args, messages_queue, reporter, threads=threads) # This thread is started in daemon mode, no joining needed. submit_thread.start() @@ -2517,7 +2538,7 @@ def _run(self, args): reporter.print_completion(message) failed_on_any_file = True - if args.failFast: + if args.fail_fast: break elif event_type == submit_thread.EXCEPTION_TAG: @@ -2547,21 +2568,21 @@ class Rm(B2IDOrB2URIMixin, BaseRm): .. code-block:: - {NAME} rm --recursive --withWildcard "b2://bucketName/*.[ct]sv" + {NAME} rm --recursive --with-wildcard "b2://bucketName/*.[ct]sv" Remove all info.txt files from buckets bX, where X is any character: .. code-block:: - {NAME} rm --recursive --withWildcard "b2://bucketName/b?/info.txt" + {NAME} rm --recursive --with-wildcard "b2://bucketName/b?/info.txt" Remove all pdf files from buckets b0 to b9 (including sub-directories): .. code-block:: - {NAME} rm --recursive --withWildcard "b2://bucketName/b[0-9]/*.pdf" + {NAME} rm --recursive --with-wildcard "b2://bucketName/b[0-9]/*.pdf" Requires capability: @@ -2635,7 +2656,7 @@ class Sync( You can alternatively control number of threads per each operation. The number of files processed in parallel is set by ``--sync-threads``, the number of files/file parts downloaded in parallel is set by``--download-threads``, - and the number of files/file parts uploaded in parallel is set by `--uploadThreads``. + and the number of files/file parts uploaded in parallel is set by ``--upload-threads``. All the three parameters can be set to the same value by ``--threads``. Experiment with parameters if the defaults are not working well. @@ -2732,7 +2753,7 @@ class Sync( .. code-block:: - {NAME} sync --delete --replaceNewer ... ... + {NAME} sync --delete --replace-newer ... ... .. warning:: @@ -2745,14 +2766,14 @@ class Sync( .. code-block:: - {NAME} sync --keepDays 30 --replaceNewer ... b2://... 
+ {NAME} sync --keep-days 30 --replace-newer ... b2://... Example of sync being used with ``--exclude-regex``. This will ignore ``.DS_Store`` files and ``.Spotlight-V100`` folders: .. code-block:: - {NAME} sync --excludeRegex '(.*\\.DS_Store)|(.*\\.Spotlight-V100)' ... b2://... + {NAME} sync --exclude-regex '(.*\\.DS_Store)|(.*\\.Spotlight-V100)' ... b2://... {DESTINATIONSSEMIXIN} {SOURCESSEMIXIN} @@ -2775,33 +2796,35 @@ class Sync( @classmethod def _setup_parser(cls, parser): - add_normalized_argument(parser, '--noProgress', action='store_true') - add_normalized_argument(parser, '--dryRun', action='store_true') - add_normalized_argument(parser, '--allowEmptySource', action='store_true') - add_normalized_argument(parser, '--excludeAllSymlinks', action='store_true') - add_normalized_argument(parser, '--syncThreads', type=int, default=cls.DEFAULT_SYNC_THREADS) + add_normalized_argument(parser, '--no-progress', action='store_true') + add_normalized_argument(parser, '--dry-run', action='store_true') + add_normalized_argument(parser, '--allow-empty-source', action='store_true') + add_normalized_argument(parser, '--exclude-all-symlinks', action='store_true') add_normalized_argument( - parser, '--downloadThreads', type=int, default=cls.DEFAULT_DOWNLOAD_THREADS + parser, '--sync-threads', type=int, default=cls.DEFAULT_SYNC_THREADS ) add_normalized_argument( - parser, '--uploadThreads', type=int, default=cls.DEFAULT_UPLOAD_THREADS + parser, '--download-threads', type=int, default=cls.DEFAULT_DOWNLOAD_THREADS ) add_normalized_argument( - parser, '--compareVersions', default='modTime', choices=('none', 'modTime', 'size') + parser, '--upload-threads', type=int, default=cls.DEFAULT_UPLOAD_THREADS ) - add_normalized_argument(parser, '--compareThreshold', type=int, metavar='MILLIS') add_normalized_argument( - parser, '--excludeRegex', action='append', default=[], metavar='REGEX' + parser, '--compare-versions', default='modTime', choices=('none', 'modTime', 'size') ) + 
add_normalized_argument(parser, '--compare-threshold', type=int, metavar='MILLIS') add_normalized_argument( - parser, '--includeRegex', action='append', default=[], metavar='REGEX' + parser, '--exclude-regex', action='append', default=[], metavar='REGEX' ) add_normalized_argument( - parser, '--excludeDirRegex', action='append', default=[], metavar='REGEX' + parser, '--include-regex', action='append', default=[], metavar='REGEX' + ) + add_normalized_argument( + parser, '--exclude-dir-regex', action='append', default=[], metavar='REGEX' ) add_normalized_argument( parser, - '--excludeIfModifiedAfter', + '--exclude-if-modified-after', type=parse_millis_from_float_timestamp, default=None, metavar='TIMESTAMP' @@ -2811,33 +2834,33 @@ def _setup_parser(cls, parser): parser.add_argument('destination') skip_group = parser.add_mutually_exclusive_group() - add_normalized_argument(skip_group, '--skipNewer', action='store_true') - add_normalized_argument(skip_group, '--replaceNewer', action='store_true') + add_normalized_argument(skip_group, '--skip-newer', action='store_true') + add_normalized_argument(skip_group, '--replace-newer', action='store_true') del_keep_group = parser.add_mutually_exclusive_group() - del_keep_group.add_argument('--delete', action='store_true') - add_normalized_argument(del_keep_group, '--keepDays', type=float, metavar='DAYS') + add_normalized_argument(del_keep_group, '--delete', action='store_true') + add_normalized_argument(del_keep_group, '--keep-days', type=float, metavar='DAYS') def _run(self, args): policies_manager = self.get_policies_manager_from_args(args) if args.threads is not None: - if args.syncThreads != self.DEFAULT_SYNC_THREADS \ - or args.uploadThreads != self.DEFAULT_UPLOAD_THREADS \ - or args.downloadThreads != self.DEFAULT_DOWNLOAD_THREADS: + if args.sync_threads != self.DEFAULT_SYNC_THREADS \ + or args.upload_threads != self.DEFAULT_UPLOAD_THREADS \ + or args.download_threads != self.DEFAULT_DOWNLOAD_THREADS: raise 
ValueError("--threads cannot be used with other thread options") sync_threads = upload_threads = download_threads = args.threads else: - sync_threads = args.syncThreads - upload_threads = args.uploadThreads - download_threads = args.downloadThreads + sync_threads = args.sync_threads + upload_threads = args.upload_threads + download_threads = args.download_threads self.api.services.upload_manager.set_thread_pool_size(upload_threads) self.api.services.download_manager.set_thread_pool_size(download_threads) source = parse_sync_folder(args.source, self.console_tool.api) destination = parse_sync_folder(args.destination, self.console_tool.api) - allow_empty_source = args.allowEmptySource or VERSION_0_COMPATIBILITY + allow_empty_source = args.allow_empty_source or VERSION_0_COMPATIBILITY synchronizer = self.get_synchronizer_from_args( args, @@ -2871,7 +2894,7 @@ def _run(self, args): write_bucket_settings=write_encryption_settings, ) - with SyncReport(self.stdout, args.noProgress or args.quiet) as reporter: + with SyncReport(self.stdout, args.no_progress or args.quiet) as reporter: try: synchronizer.sync_folders( source_folder=source, @@ -2892,11 +2915,11 @@ def _run(self, args): def get_policies_manager_from_args(self, args): return ScanPoliciesManager( - exclude_dir_regexes=args.excludeDirRegex, - exclude_file_regexes=args.excludeRegex, - include_file_regexes=args.includeRegex, - exclude_all_symlinks=args.excludeAllSymlinks, - exclude_modified_after=args.excludeIfModifiedAfter, + exclude_dir_regexes=args.exclude_dir_regex, + exclude_file_regexes=args.exclude_regex, + include_file_regexes=args.include_regex, + exclude_all_symlinks=args.exclude_all_symlinks, + exclude_modified_after=args.exclude_if_modified_after, ) def get_synchronizer_from_args( @@ -2907,30 +2930,30 @@ def get_synchronizer_from_args( allow_empty_source=False, absolute_minimum_part_size=None, ): - if args.replaceNewer: + if args.replace_newer: newer_file_mode = NewerFileSyncMode.REPLACE - elif 
args.skipNewer: + elif args.skip_newer: newer_file_mode = NewerFileSyncMode.SKIP else: newer_file_mode = NewerFileSyncMode.RAISE_ERROR - if args.compareVersions == 'none': + if args.compare_versions == 'none': compare_version_mode = CompareVersionMode.NONE - elif args.compareVersions == 'modTime': + elif args.compare_versions == 'modTime': compare_version_mode = CompareVersionMode.MODTIME - elif args.compareVersions == 'size': + elif args.compare_versions == 'size': compare_version_mode = CompareVersionMode.SIZE else: compare_version_mode = CompareVersionMode.MODTIME - compare_threshold = args.compareThreshold + compare_threshold = args.compare_threshold keep_days = None if args.delete: keep_days_or_delete = KeepOrDeleteMode.DELETE - elif args.keepDays: + elif args.keep_days: keep_days_or_delete = KeepOrDeleteMode.KEEP_BEFORE_DELETE - keep_days = args.keepDays + keep_days = args.keep_days else: keep_days_or_delete = KeepOrDeleteMode.NO_DELETE @@ -2939,7 +2962,7 @@ def get_synchronizer_from_args( return Synchronizer( max_workers, policies_manager=policies_manager, - dry_run=args.dryRun, + dry_run=args.dry_run, allow_empty_source=allow_empty_source, newer_file_mode=newer_file_mode, keep_days_or_delete=keep_days_or_delete, @@ -2994,17 +3017,17 @@ class UpdateBucket(DefaultSseMixin, LifecycleRulesMixin, Command): @classmethod def _setup_parser(cls, parser): - add_normalized_argument(parser, '--bucketInfo', type=validated_loads) + add_normalized_argument(parser, '--bucket-info', type=validated_loads) add_normalized_argument( parser, - '--corsRules', + '--cors-rules', type=validated_loads, help= "If given, the bucket will have a 'custom' CORS configuration. Accepts a JSON string." 
) add_normalized_argument( parser, - '--defaultRetentionMode', + '--default-retention-mode', choices=( RetentionMode.COMPLIANCE.value, RetentionMode.GOVERNANCE.value, @@ -3014,14 +3037,14 @@ def _setup_parser(cls, parser): ) add_normalized_argument( parser, - '--defaultRetentionPeriod', + '--default-retention-period', type=parse_default_retention_period, metavar='period', ) parser.add_argument('--replication', type=validated_loads) add_normalized_argument( parser, - '--fileLockEnabled', + '--file-lock-enabled', action='store_true', default=None, help= @@ -3033,12 +3056,12 @@ def _setup_parser(cls, parser): super()._setup_parser(parser) # add parameters from the mixins and the parent class def _run(self, args): - if args.defaultRetentionMode is not None: - if args.defaultRetentionMode == 'none': + if args.default_retention_mode is not None: + if args.default_retention_mode == 'none': default_retention = NO_RETENTION_BUCKET_SETTING else: default_retention = BucketRetentionSetting( - RetentionMode(args.defaultRetentionMode), args.defaultRetentionPeriod + RetentionMode(args.default_retention_mode), args.default_retention_period ) else: default_retention = None @@ -3050,13 +3073,13 @@ def _run(self, args): bucket = self.api.get_bucket_by_name(args.bucketName) bucket = bucket.update( bucket_type=args.bucketType, - bucket_info=args.bucketInfo, - cors_rules=args.corsRules, - lifecycle_rules=args.lifecycleRules, + bucket_info=args.bucket_info, + cors_rules=args.cors_rules, + lifecycle_rules=args.lifecycle_rules, default_server_side_encryption=encryption_setting, default_retention=default_retention, replication=replication, - is_file_lock_enabled=args.fileLockEnabled, + is_file_lock_enabled=args.file_lock_enabled, ) self._print_json(bucket) return 0 @@ -3074,7 +3097,7 @@ class MinPartSizeMixin(Described): def _setup_parser(cls, parser): add_normalized_argument( parser, - '--minPartSize', + '--min-part-size', type=int, help="minimum part size in bytes", default=None, @@ 
-3107,7 +3130,7 @@ class UploadFileMixin( def _setup_parser(cls, parser): add_normalized_argument( parser, - '--contentType', + '--content-type', help="MIME type of the file being uploaded. If not set it will be guessed." ) parser.add_argument( @@ -3120,7 +3143,8 @@ def _setup_parser(cls, parser): help= "additional file info to be stored with the file. Can be used multiple times for different information." ) - parser.add_argument( + add_normalized_argument( + parser, '--custom-upload-timestamp', type=int, help="overrides object creation date. Expressed as a number of milliseconds since epoch." @@ -3164,7 +3188,7 @@ def get_execute_kwargs(self, args) -> dict: "bucket": self.api.get_bucket_by_name(args.bucketName), "content_type": - args.contentType, + args.content_type, "custom_upload_timestamp": args.custom_upload_timestamp, "encryption": @@ -3180,9 +3204,9 @@ def get_execute_kwargs(self, args) -> dict: "local_file": args.localFilePath, "min_part_size": - args.minPartSize, + args.min_part_size, "progress_listener": - self.make_progress_listener(args.localFilePath, args.noProgress or args.quiet), + self.make_progress_listener(args.localFilePath, args.no_progress or args.quiet), "sha1_sum": args.sha1, "threads": @@ -3328,14 +3352,14 @@ class UploadUnboundStream(UploadFileMixin, Command): def _setup_parser(cls, parser): add_normalized_argument( parser, - '--partSize', + '--part-size', type=int, default=None, help=("part size in bytes. 
Must be in range of "), ) add_normalized_argument( parser, - '--unusedBufferTimeoutSeconds', + '--unused-buffer-timeout-seconds', type=float, default=3600.0, help=( @@ -3348,8 +3372,8 @@ def _setup_parser(cls, parser): def get_execute_kwargs(self, args) -> dict: kwargs = super().get_execute_kwargs(args) kwargs = self.upload_file_kwargs_to_unbound_upload(**kwargs) - kwargs["recommended_upload_part_size"] = args.partSize - kwargs["unused_buffer_timeout_seconds"] = args.unusedBufferTimeoutSeconds + kwargs["recommended_upload_part_size"] = args.part_size + kwargs["unused_buffer_timeout_seconds"] = args.unused_buffer_timeout_seconds return kwargs def execute_operation(self, local_file, bucket, threads, **kwargs): @@ -3391,7 +3415,7 @@ def _setup_parser(cls, parser): def _run(self, args): file_name = self._get_file_name_from_args(args) - legal_hold = LegalHold(args.legalHold) + legal_hold = LegalHold(args.legal_hold) self.api.update_file_legal_hold(args.fileId, file_name, legal_hold) return 0 @@ -3432,12 +3456,12 @@ def _setup_parser(cls, parser): ) add_normalized_argument( parser, - '--retainUntil', + '--retain-until', type=parse_millis_from_float_timestamp, metavar='TIMESTAMP', default=None ) - add_normalized_argument(parser, '--bypassGovernance', action='store_true', default=False) + add_normalized_argument(parser, '--bypass-governance', action='store_true', default=False) def _run(self, args): file_name = self._get_file_name_from_args(args) @@ -3446,11 +3470,11 @@ def _run(self, args): file_retention = FileRetentionSetting(RetentionMode.NONE) else: file_retention = FileRetentionSetting( - RetentionMode(args.retentionMode), args.retainUntil + RetentionMode(args.retention_mode), args.retain_until ) self.api.update_file_retention( - args.fileId, file_name, file_retention, args.bypassGovernance + args.fileId, file_name, file_retention, args.bypass_governance ) return 0 @@ -3470,11 +3494,14 @@ class ReplicationSetup(Command): @classmethod def _setup_parser(cls, parser): 
super()._setup_parser(parser) - parser.add_argument('--destination-profile', default=None) + add_normalized_argument(parser, '--destination-profile', default=None) parser.add_argument('source', metavar='SOURCE_BUCKET_NAME') parser.add_argument('destination', metavar='DESTINATION_BUCKET_NAME') - parser.add_argument('--name', help='name for the new replication rule on the source side') - parser.add_argument( + add_normalized_argument( + parser, '--name', help='name for the new replication rule on the source side' + ) + add_normalized_argument( + parser, '--priority', help= 'priority for the new replication rule on the source side [%d-%d]. Will be set automatically when not specified.' @@ -3485,12 +3512,14 @@ def _setup_parser(cls, parser): type=int, default=None, ) - parser.add_argument( + add_normalized_argument( + parser, '--file-name-prefix', metavar='PREFIX', help='only replicate files starting with PREFIX' ) - parser.add_argument( + add_normalized_argument( + parser, '--include-existing-files', action='store_true', help='if given, also replicates files uploaded prior to creation of the replication rule' @@ -3509,7 +3538,7 @@ def _run(self, args): ).get_fresh_state(), name=args.name, priority=args.priority, - prefix=args.file_name_prefix, + prefix=args.file_name_prefix, include_existing_files=args.include_existing_files, ) return 0 @@ -3638,7 +3667,7 @@ class ReplicationStatus(Command): "Console" output format is meant to be human-readable and is subject to change in any further release. One should use "json" for reliable "no-breaking-changes" output format. When piping "csv" format to some .csv file, it's handy to use - --noProgress flag which will disable interactive reporting output, otherwise it will + --no-progress flag which will disable interactive reporting output, otherwise it will also go to target csv file's first line. 
--columns @@ -3650,14 +3679,15 @@ class ReplicationStatus(Command): def _setup_parser(cls, parser): super()._setup_parser(parser) parser.add_argument('source', metavar='SOURCE_BUCKET_NAME') - parser.add_argument('--rule', metavar='REPLICATION_RULE_NAME', default=None) - parser.add_argument('--destination-profile') - parser.add_argument('--dont-scan-destination', action='store_true') - parser.add_argument( - '--output-format', default='console', choices=('console', 'json', 'csv') + add_normalized_argument(parser, '--rule', metavar='REPLICATION_RULE_NAME', default=None) + add_normalized_argument(parser, '--destination-profile') + add_normalized_argument(parser, '--dont-scan-destination', action='store_true') + add_normalized_argument( + parser, '--output-format', default='console', choices=('console', 'json', 'csv') ) - add_normalized_argument(parser, '--noProgress', action='store_true') - parser.add_argument( + add_normalized_argument(parser, '--no-progress', action='store_true') + add_normalized_argument( + parser, '--columns', default=['all'], type=lambda value: re.split(r', ?', value), @@ -3690,7 +3720,7 @@ def _run(self, args): rule=rule, destination_api=destination_api, scan_destination=not args.dont_scan_destination, - quiet=args.noProgress or args.quiet, + quiet=args.no_progress or args.quiet, ) for rule in rules } @@ -3801,7 +3831,7 @@ class Version(Command): @classmethod def _setup_parser(cls, parser): - parser.add_argument('--short', action='store_true') + add_normalized_argument(parser, '--short', action='store_true') super()._setup_parser(parser) def _run(self, args): @@ -3860,9 +3890,11 @@ def __init__(self, console_tool): @classmethod def _setup_parser(cls, parser): # these are for building, users should not call it: - parser.add_argument('--dump', action='store_true', default=False, help=argparse.SUPPRESS) - parser.add_argument( - '--with-packages', action='store_true', default=False, help=argparse.SUPPRESS + add_normalized_argument( + parser, 
'--dump', action='store_true', default=False, help=argparse.SUPPRESS + ) + add_normalized_argument( + parser, '--with-packages', action='store_true', default=False, help=argparse.SUPPRESS ) super()._setup_parser(parser) @@ -4016,7 +4048,7 @@ class InstallAutocomplete(Command): @classmethod def _setup_parser(cls, parser): - parser.add_argument('--shell', choices=SUPPORTED_SHELLS, default=None) + add_normalized_argument(parser, '--shell', choices=SUPPORTED_SHELLS, default=None) super()._setup_parser(parser) def _run(self, args): @@ -4151,11 +4183,11 @@ def _print_stderr(self, *args, **kwargs): @classmethod def _setup_logging(cls, args, argv): - if args.logConfig and (args.verbose or args.debugLogs): - raise ValueError('Please provide either --logConfig or --verbose/--debugLogs') - if args.logConfig: - logging.config.fileConfig(args.logConfig) - elif args.verbose or args.debugLogs: + if args.log_config and (args.verbose or args.debug_logs): + raise ValueError('Please provide either --log-config or --verbose/--debug-logs') + if args.log_config: + logging.config.fileConfig(args.log_config) + elif args.verbose or args.debug_logs: # set log level to DEBUG for ALL loggers (even those not belonging to B2), but without any handlers, # those will added as needed (file and/or stderr) logging.basicConfig(level=logging.DEBUG, handlers=[]) @@ -4168,7 +4200,7 @@ def _setup_logging(cls, args, argv): # logs from ALL loggers sent to stderr should be formatted this way logging.root.addHandler(handler) - if args.debugLogs: + if args.debug_logs: formatter = logging.Formatter( '%(asctime)s\t%(process)d\t%(thread)d\t%(name)s\t%(levelname)s\t%(message)s' ) diff --git a/changelog.d/+kebab.changed.md b/changelog.d/+kebab.changed.md index 034d4cbb4..b7d1218e3 100644 --- a/changelog.d/+kebab.changed.md +++ b/changelog.d/+kebab.changed.md @@ -1 +1,2 @@ -Migrate --camelCase cli flags to --kebab-case. 
+Migrated all _b2v4 --camelCase cli flags to --kebab-case +Added deprecation warning for b2v3 --camelCase cli flags diff --git a/test/unit/console_tool/test_authorize_account.py b/test/unit/console_tool/test_authorize_account.py index f8e49e3f5..e4ce9bfb5 100644 --- a/test/unit/console_tool/test_authorize_account.py +++ b/test/unit/console_tool/test_authorize_account.py @@ -80,7 +80,7 @@ def test_authorize_using_env_variables(b2_cli): "flags,realm_url", [ ([], "http://production.example.com"), - (["--debugLogs"], "http://production.example.com"), + (["--debug-logs"], "http://production.example.com"), (["--environment", "http://custom.example.com"], "http://custom.example.com"), (["--environment", "production"], "http://production.example.com"), (["--dev"], "http://api.backblazeb2.xyz:8180"), @@ -90,7 +90,7 @@ def test_authorize_using_env_variables(b2_cli): def test_authorize_towards_realm( b2_cli, b2_cli_is_authorized_afterwards, flags, realm_url, cwd_path, b2_cli_log_fix ): - expected_stderr = f"Using {realm_url}\n" if any(f != "--debugLogs" for f in flags) else "" + expected_stderr = f"Using {realm_url}\n" if any(f != "--debug-logs" for f in flags) else "" b2_cli._run_command( ["authorize-account", *flags, b2_cli.account_id, b2_cli.master_key], @@ -99,7 +99,7 @@ def test_authorize_towards_realm( 0, ) log_path = cwd_path / "b2_cli.log" - if "--debugLogs" in flags: + if "--debug-logs" in flags: assert f"Using {realm_url}\n" in log_path.read_text() else: assert not log_path.exists() diff --git a/test/unit/test_console_tool.py b/test/unit/test_console_tool.py index a6c7d3884..a4b51705c 100644 --- a/test/unit/test_console_tool.py +++ b/test/unit/test_console_tool.py @@ -259,6 +259,27 @@ def _upload_multiple_files(cls, bucket): class TestConsoleTool(BaseConsoleToolTest): + @pytest.mark.cli_version(to_version=3) + def test_camel_case_supported_in_v3(self): + self._authorize_account() + self._run_command( + ['create-bucket', 'my-bucket', '--bucketInfo', '{"xxx": "123"}', 
'allPrivate'], + 'bucket_0\n', '', 0 + ) + self._run_command( + ['create-bucket', 'my-bucket-kebab', '--bucket-info', '{"xxx": "123"}', 'allPrivate'], + 'bucket_0\n', '', 0 + ) + + @pytest.mark.cli_version(from_version=4) + def test_camel_case_not_supported_in_v4(self): + self._authorize_account() + self._run_command(['create-bucket', 'my-bucket', '--bucketInfo', 'allPrivate'], '', '', 2) + self._run_command( + ['create-bucket', 'my-bucket-kebab', '--bucket-info', '{"xxx": "123"}', 'allPrivate'], + 'bucket_0\n', '', 0 + ) + def test_create_key_and_authorize_with_it(self): # Start with authorizing with the master key self._authorize_account() @@ -382,7 +403,7 @@ def test_create_bucket__with_lifecycle_rules(self): ) self._run_command( - ['create-bucket', 'my-bucket', 'allPrivate', '--lifecycleRules', rules], 'bucket_0\n', + ['create-bucket', 'my-bucket', 'allPrivate', '--lifecycle-rules', rules], 'bucket_0\n', '', 0 ) @@ -400,7 +421,7 @@ def test_create_bucket__mutually_exclusive_lifecycle_rules_options(self): self._run_command( [ 'create-bucket', 'my-bucket', 'allPrivate', '--lifecycle-rule', rule, - '--lifecycleRules', f"[{rule}]" + '--lifecycle-rules', f"[{rule}]" ], '', '', 2 ) diff --git a/test/unit/test_copy.py b/test/unit/test_copy.py index 2af3744a7..718fd9a18 100644 --- a/test/unit/test_copy.py +++ b/test/unit/test_copy.py @@ -86,7 +86,7 @@ def test_determine_source_metadata(self): with self.assertRaises( ValueError, 'Attempting to copy file with metadata while either source or ' - 'destination uses SSE-C. Use --fetchMetadata to fetch source ' + 'destination uses SSE-C. Use --fetch-metadata to fetch source ' 'file metadata before copying.' ): copy_file_command._determine_source_metadata(