diff --git a/.travis.yml b/.travis.yml
index 23d5665..8916c15 100644
--- a/.travis.yml
+++ b/.travis.yml
@@ -14,7 +14,8 @@ script:
 - coverage run --concurrency=multiprocessing run_tests.py
 - coverage combine
 - coverage report -m
-- flake8
+- make format-check
+- make lint
 
 after_success:
 - codecov
diff --git a/Makefile b/Makefile
index fbaa0cb..c858db0 100644
--- a/Makefile
+++ b/Makefile
@@ -23,5 +23,14 @@ release: sdist ## Make a pypi release
 sdist: clean ## Make a source distribution
 	python setup.py sdist
 
-test: ## Make a test run
+format-check: ## Check to make sure format is correct
+	black --check .
+
+format: ## Use black to format the python source code
+	black .
+
+lint: ## Check syntax with flake8
+	flake8
+
+test: lint ## Make a test run
 	python run_tests.py -vxrs test/
diff --git a/conda_mirror/__init__.py b/conda_mirror/__init__.py
index 74f4e66..80edaf0 100644
--- a/conda_mirror/__init__.py
+++ b/conda_mirror/__init__.py
@@ -1,4 +1,4 @@
-
 from ._version import get_versions
-__version__ = get_versions()['version']
+
+__version__ = get_versions()["version"]
 del get_versions
diff --git a/conda_mirror/_version.py b/conda_mirror/_version.py
index 36f2b2e..a6454e4 100644
--- a/conda_mirror/_version.py
+++ b/conda_mirror/_version.py
@@ -1,4 +1,3 @@
-
 # This file helps to compute a version number in source trees obtained from
 # git-archive tarball (such as those provided by githubs download-from-tag
 # feature). Distribution tarballs (built by setup.py sdist) and build
@@ -58,17 +57,18 @@ class NotThisMethod(Exception):
 
 def register_vcs_handler(vcs, method):  # decorator
     """Decorator to mark a method as the handler for a particular VCS."""
+
     def decorate(f):
         """Store f in HANDLERS[vcs][method]."""
         if vcs not in HANDLERS:
             HANDLERS[vcs] = {}
         HANDLERS[vcs][method] = f
         return f
+
     return decorate
 
 
-def run_command(commands, args, cwd=None, verbose=False, hide_stderr=False,
-                env=None):
+def run_command(commands, args, cwd=None, verbose=False, hide_stderr=False, env=None):
     """Call the given command(s)."""
     assert isinstance(commands, list)
     p = None
@@ -76,10 +76,13 @@ def run_command(commands, args, cwd=None, verbose=False, hide_stderr=False,
         try:
             dispcmd = str([c] + args)
             # remember shell=False, so use git.cmd on windows, not just git
-            p = subprocess.Popen([c] + args, cwd=cwd, env=env,
-                                 stdout=subprocess.PIPE,
-                                 stderr=(subprocess.PIPE if hide_stderr
-                                         else None))
+            p = subprocess.Popen(
+                [c] + args,
+                cwd=cwd,
+                env=env,
+                stdout=subprocess.PIPE,
+                stderr=(subprocess.PIPE if hide_stderr else None),
+            )
             break
         except EnvironmentError:
             e = sys.exc_info()[1]
@@ -116,16 +119,22 @@ def versions_from_parentdir(parentdir_prefix, root, verbose):
     for i in range(3):
         dirname = os.path.basename(root)
         if dirname.startswith(parentdir_prefix):
-            return {"version": dirname[len(parentdir_prefix):],
-                    "full-revisionid": None,
-                    "dirty": False, "error": None, "date": None}
+            return {
+                "version": dirname[len(parentdir_prefix) :],
+                "full-revisionid": None,
+                "dirty": False,
+                "error": None,
+                "date": None,
+            }
         else:
             rootdirs.append(root)
             root = os.path.dirname(root)  # up a level
 
     if verbose:
-        print("Tried directories %s but none started with prefix %s" %
-              (str(rootdirs), parentdir_prefix))
+        print(
+            "Tried directories %s but none started with prefix %s"
+            % (str(rootdirs), parentdir_prefix)
+        )
     raise NotThisMethod("rootdir doesn't start with parentdir_prefix")
@@ -181,7 +190,7 @@ def git_versions_from_keywords(keywords, tag_prefix, verbose):
     # starting in git-1.8.3, tags are listed as "tag: foo-1.0" instead of
     # just "foo-1.0". If we see a "tag: " prefix, prefer those.
     TAG = "tag: "
-    tags = set([r[len(TAG):] for r in refs if r.startswith(TAG)])
+    tags = set([r[len(TAG) :] for r in refs if r.startswith(TAG)])
     if not tags:
         # Either we're using git < 1.8.3, or there really are no tags. We use
         # a heuristic: assume all version tags have a digit. The old git %d
@@ -190,7 +199,7 @@ def git_versions_from_keywords(keywords, tag_prefix, verbose):
         # between branches and tags. By ignoring refnames without digits, we
         # filter out many common branch names like "release" and
         # "stabilization", as well as "HEAD" and "master".
-        tags = set([r for r in refs if re.search(r'\d', r)])
+        tags = set([r for r in refs if re.search(r"\d", r)])
         if verbose:
             print("discarding '%s', no digits" % ",".join(refs - tags))
     if verbose:
@@ -198,19 +207,26 @@ def git_versions_from_keywords(keywords, tag_prefix, verbose):
     for ref in sorted(tags):
         # sorting will prefer e.g. "2.0" over "2.0rc1"
         if ref.startswith(tag_prefix):
-            r = ref[len(tag_prefix):]
+            r = ref[len(tag_prefix) :]
             if verbose:
                 print("picking %s" % r)
-            return {"version": r,
-                    "full-revisionid": keywords["full"].strip(),
-                    "dirty": False, "error": None,
-                    "date": date}
+            return {
+                "version": r,
+                "full-revisionid": keywords["full"].strip(),
+                "dirty": False,
+                "error": None,
+                "date": date,
+            }
     # no suitable tags, so version is "0+unknown", but full hex is still there
     if verbose:
         print("no suitable tags, using unknown + full revision id")
-    return {"version": "0+unknown",
-            "full-revisionid": keywords["full"].strip(),
-            "dirty": False, "error": "no suitable tags", "date": None}
+    return {
+        "version": "0+unknown",
+        "full-revisionid": keywords["full"].strip(),
+        "dirty": False,
+        "error": "no suitable tags",
+        "date": None,
+    }
 
 
 @register_vcs_handler("git", "pieces_from_vcs")
@@ -225,8 +241,7 @@ def git_pieces_from_vcs(tag_prefix, root, verbose, run_command=run_command):
     if sys.platform == "win32":
         GITS = ["git.cmd", "git.exe"]
 
-    out, rc = run_command(GITS, ["rev-parse", "--git-dir"], cwd=root,
-                          hide_stderr=True)
+    out, rc = run_command(GITS, ["rev-parse", "--git-dir"], cwd=root, hide_stderr=True)
     if rc != 0:
         if verbose:
             print("Directory %s not under git control" % root)
@@ -234,10 +249,19 @@ def git_pieces_from_vcs(tag_prefix, root, verbose, run_command=run_command):
     # if there is a tag matching tag_prefix, this yields TAG-NUM-gHEX[-dirty]
     # if there isn't one, this yields HEX[-dirty] (no NUM)
-    describe_out, rc = run_command(GITS, ["describe", "--tags", "--dirty",
-                                          "--always", "--long",
-                                          "--match", "%s*" % tag_prefix],
-                                   cwd=root)
+    describe_out, rc = run_command(
+        GITS,
+        [
+            "describe",
+            "--tags",
+            "--dirty",
+            "--always",
+            "--long",
+            "--match",
+            "%s*" % tag_prefix,
+        ],
+        cwd=root,
+    )
     # --long was added in git-1.5.5
     if describe_out is None:
         raise NotThisMethod("'git describe' failed")
@@ -260,17 +284,16 @@ def git_pieces_from_vcs(tag_prefix, root, verbose, run_command=run_command):
     dirty = git_describe.endswith("-dirty")
     pieces["dirty"] = dirty
     if dirty:
-        git_describe = git_describe[:git_describe.rindex("-dirty")]
+        git_describe = git_describe[: git_describe.rindex("-dirty")]
 
     # now we have TAG-NUM-gHEX or HEX
 
     if "-" in git_describe:
         # TAG-NUM-gHEX
-        mo = re.search(r'^(.+)-(\d+)-g([0-9a-f]+)$', git_describe)
+        mo = re.search(r"^(.+)-(\d+)-g([0-9a-f]+)$", git_describe)
         if not mo:
             # unparseable. Maybe git-describe is misbehaving?
- pieces["error"] = ("unable to parse git-describe output: '%s'" - % describe_out) + pieces["error"] = "unable to parse git-describe output: '%s'" % describe_out return pieces # tag @@ -279,10 +302,12 @@ def git_pieces_from_vcs(tag_prefix, root, verbose, run_command=run_command): if verbose: fmt = "tag '%s' doesn't start with prefix '%s'" print(fmt % (full_tag, tag_prefix)) - pieces["error"] = ("tag '%s' doesn't start with prefix '%s'" - % (full_tag, tag_prefix)) + pieces["error"] = "tag '%s' doesn't start with prefix '%s'" % ( + full_tag, + tag_prefix, + ) return pieces - pieces["closest-tag"] = full_tag[len(tag_prefix):] + pieces["closest-tag"] = full_tag[len(tag_prefix) :] # distance: number of commits since tag pieces["distance"] = int(mo.group(2)) @@ -293,13 +318,13 @@ def git_pieces_from_vcs(tag_prefix, root, verbose, run_command=run_command): else: # HEX: no tags pieces["closest-tag"] = None - count_out, rc = run_command(GITS, ["rev-list", "HEAD", "--count"], - cwd=root) + count_out, rc = run_command(GITS, ["rev-list", "HEAD", "--count"], cwd=root) pieces["distance"] = int(count_out) # total number of commits # commit date: see ISO-8601 comment in git_versions_from_keywords() - date = run_command(GITS, ["show", "-s", "--format=%ci", "HEAD"], - cwd=root)[0].strip() + date = run_command(GITS, ["show", "-s", "--format=%ci", "HEAD"], cwd=root)[ + 0 + ].strip() pieces["date"] = date.strip().replace(" ", "T", 1).replace(" ", "", 1) return pieces @@ -330,8 +355,7 @@ def render_pep440(pieces): rendered += ".dirty" else: # exception #1 - rendered = "0+untagged.%d.g%s" % (pieces["distance"], - pieces["short"]) + rendered = "0+untagged.%d.g%s" % (pieces["distance"], pieces["short"]) if pieces["dirty"]: rendered += ".dirty" return rendered @@ -445,11 +469,13 @@ def render_git_describe_long(pieces): def render(pieces, style): """Render the given version pieces into the requested style.""" if pieces["error"]: - return {"version": "unknown", - "full-revisionid": pieces.get("long"), - "dirty": None, - "error": pieces["error"], - "date": None} + return { + "version": "unknown", + "full-revisionid": pieces.get("long"), + "dirty": None, + "error": pieces["error"], + "date": None, + } if not style or style == "default": style = "pep440" # the default @@ -469,9 +495,13 @@ def render(pieces, style): else: raise ValueError("unknown style '%s'" % style) - return {"version": rendered, "full-revisionid": pieces["long"], - "dirty": pieces["dirty"], "error": None, - "date": pieces.get("date")} + return { + "version": rendered, + "full-revisionid": pieces["long"], + "dirty": pieces["dirty"], + "error": None, + "date": pieces.get("date"), + } def get_versions(): @@ -485,8 +515,7 @@ def get_versions(): verbose = cfg.verbose try: - return git_versions_from_keywords(get_keywords(), cfg.tag_prefix, - verbose) + return git_versions_from_keywords(get_keywords(), cfg.tag_prefix, verbose) except NotThisMethod: pass @@ -495,13 +524,16 @@ def get_versions(): # versionfile_source is the relative path from the top of the source # tree (where the .git directory might live) to this file. Invert # this to find the root from __file__. 
-        for i in cfg.versionfile_source.split('/'):
+        for i in cfg.versionfile_source.split("/"):
             root = os.path.dirname(root)
     except NameError:
-        return {"version": "0+unknown", "full-revisionid": None,
-                "dirty": None,
-                "error": "unable to find root of source tree",
-                "date": None}
+        return {
+            "version": "0+unknown",
+            "full-revisionid": None,
+            "dirty": None,
+            "error": "unable to find root of source tree",
+            "date": None,
+        }
 
     try:
         pieces = git_pieces_from_vcs(cfg.tag_prefix, root, verbose)
@@ -515,6 +547,10 @@ def get_versions():
     except NotThisMethod:
         pass
 
-    return {"version": "0+unknown", "full-revisionid": None,
-            "dirty": None,
-            "error": "unable to compute version", "date": None}
+    return {
+        "version": "0+unknown",
+        "full-revisionid": None,
+        "dirty": None,
+        "error": "unable to compute version",
+        "date": None,
+    }
diff --git a/conda_mirror/conda_mirror.py b/conda_mirror/conda_mirror.py
index 0de9c53..d9f1635 100644
--- a/conda_mirror/conda_mirror.py
+++ b/conda_mirror/conda_mirror.py
@@ -20,13 +20,9 @@
 logger = None
 
-DEFAULT_BAD_LICENSES = ['agpl', '']
+DEFAULT_BAD_LICENSES = ["agpl", ""]
 
-DEFAULT_PLATFORMS = ['linux-64',
-                     'linux-32',
-                     'osx-64',
-                     'win-64',
-                     'win-32']
+DEFAULT_PLATFORMS = ["linux-64", "linux-32", "osx-64", "win-64", "win-32"]
 
 
 def _maybe_split_channel(channel):
@@ -54,20 +50,19 @@ def _maybe_split_channel(channel):
     """
     # strip trailing slashes
-    channel = channel.strip('/')
+    channel = channel.strip("/")
     default_url_base = "https://conda.anaconda.org"
     url_suffix = "/{channel}/{platform}/{file_name}"
-    if '://' not in channel:
+    if "://" not in channel:
         # assume we are being given a channel for anaconda.org
         logger.debug("Assuming %s is an anaconda.org channel", channel)
         url = default_url_base + url_suffix
         return url, channel
     # looks like we are being given a fully qualified channel
-    download_base, channel = channel.rsplit('/', 1)
+    download_base, channel = channel.rsplit("/", 1)
     download_template = download_base + url_suffix
-    logger.debug('download_template=%s. channel=%s',
-                 download_template, channel)
+    logger.debug("download_template=%s. channel=%s", download_template, channel)
     return download_template, channel
@@ -89,14 +84,12 @@ def _match(all_packages, key_glob_dict):
     """
     matched = dict()
-    key_glob_dict = {key.lower(): glob.lower()
-                     for key, glob
-                     in key_glob_dict.items()}
+    key_glob_dict = {key.lower(): glob.lower() for key, glob in key_glob_dict.items()}
     for pkg_name, pkg_info in all_packages.items():
         matched_all = []
         # normalize the strings so that comparisons are easier
         for key, pattern in key_glob_dict.items():
-            name = str(pkg_info.get(key, '')).lower()
+            name = str(pkg_info.get(key, "")).lower()
             if fnmatch.fnmatch(name, pattern):
                 matched_all.append(True)
             else:
@@ -126,109 +119,120 @@ def _make_arg_parser():
     argument_parser : argparse.ArgumentParser
         The instantiated argument parser for this CLI
     """
-    ap = argparse.ArgumentParser(
-        description="CLI interface for conda-mirror.py")
+    ap = argparse.ArgumentParser(description="CLI interface for conda-mirror.py")
     ap.add_argument(
-        '--upstream-channel',
-        help=('The target channel to mirror. Can be a channel on anaconda.org '
-              'like "conda-forge" or a full qualified channel like '
-              '"https://repo.continuum.io/pkgs/free/"'),
+        "--upstream-channel",
+        help=(
+            "The target channel to mirror. Can be a channel on anaconda.org "
+            'like "conda-forge" or a full qualified channel like '
+            '"https://repo.continuum.io/pkgs/free/"'
+        ),
     )
     ap.add_argument(
-        '--target-directory',
-        help='The place where packages should be mirrored to',
+        "--target-directory", help="The place where packages should be mirrored to",
     )
     ap.add_argument(
-        '--temp-directory',
+        "--temp-directory",
         help=(
-            'Temporary download location for the packages. Defaults to a '
-            'randomly selected temporary directory. Note that you might need '
-            'to specify a different location if your default temp directory '
-            'has less available space than your mirroring target'),
-        default=tempfile.gettempdir()
+            "Temporary download location for the packages. Defaults to a "
+            "randomly selected temporary directory. Note that you might need "
+            "to specify a different location if your default temp directory "
+            "has less available space than your mirroring target"
+        ),
+        default=tempfile.gettempdir(),
     )
     ap.add_argument(
-        '--platform',
-        help=("The OS platform(s) to mirror. one of: {'linux-64', 'linux-32',"
-              "'osx-64', 'win-32', 'win-64'}"),
+        "--platform",
+        help=(
+            "The OS platform(s) to mirror. one of: {'linux-64', 'linux-32',"
+            "'osx-64', 'win-32', 'win-64'}"
+        ),
     )
     ap.add_argument(
-        '-v', '--verbose',
+        "-v",
+        "--verbose",
         action="count",
-        help=("logging defaults to error/exception only. Takes up to three "
-              "'-v' flags. '-v': warning. '-vv': info. '-vvv': debug."),
+        help=(
+            "logging defaults to error/exception only. Takes up to three "
+            "'-v' flags. '-v': warning. '-vv': info. '-vvv': debug."
+        ),
         default=0,
     )
     ap.add_argument(
-        '--config',
-        action="store",
-        help="Path to the yaml config file",
+        "--config", action="store", help="Path to the yaml config file",
     )
     ap.add_argument(
-        '--pdb',
+        "--pdb",
         action="store_true",
         help="Enable PDB debugging on exception",
         default=False,
     )
     ap.add_argument(
-        '--num-threads',
+        "--num-threads",
         action="store",
         default=1,
         type=int,
-        help="Num of threads for validation. 1: Serial mode. 0: All available."
-    )
+        help="Num of threads for validation. 1: Serial mode. 0: All available.",
+    )
     ap.add_argument(
-        '--version',
-        action="store_true",
-        help="Print version and quit",
-        default=False,
+        "--version", action="store_true", help="Print version and quit", default=False,
     )
     ap.add_argument(
-        '--dry-run',
+        "--dry-run",
         action="store_true",
-        help=("Show what will be downloaded and what will be removed. Will not "
-              "validate existing packages"),
-        default=False
+        help=(
+            "Show what will be downloaded and what will be removed. Will not "
+            "validate existing packages"
+        ),
+        default=False,
     )
     ap.add_argument(
-        '--no-validate-target',
+        "--no-validate-target",
         action="store_true",
         help="Skip validation of files already present in target-directory",
         default=False,
     )
     ap.add_argument(
-        '--minimum-free-space',
+        "--minimum-free-space",
         help=("Threshold for free diskspace. Given in megabytes."),
         type=int,
         default=1000,
     )
     ap.add_argument(
-        '--proxy',
-        help=('Proxy URL to access internet if needed'),
+        "--proxy",
+        help=("Proxy URL to access internet if needed"),
         type=str,
         default=None,
     )
     ap.add_argument(
-        '--ssl-verify', '--ssl_verify',
-        help=('Path to a CA_BUNDLE file with certificates of trusted CAs, '
-              'this may be "False" to disable verification as per the '
-              'requests API.'),
+        "--ssl-verify",
+        "--ssl_verify",
+        help=(
+            "Path to a CA_BUNDLE file with certificates of trusted CAs, "
+            'this may be "False" to disable verification as per the '
+            "requests API."
+        ),
         type=_str_or_false,
         default=None,
-        dest='ssl_verify',
+        dest="ssl_verify",
     )
     ap.add_argument(
-        '-k', '--insecure',
-        help=('Allow conda to perform "insecure" SSL connections and '
-              "transfers. Equivalent to setting 'ssl_verify' to 'false'."),
+        "-k",
+        "--insecure",
+        help=(
+            'Allow conda to perform "insecure" SSL connections and '
+            "transfers. Equivalent to setting 'ssl_verify' to 'false'."
+        ),
         action="store_false",
         dest="ssl_verify",
     )
     ap.add_argument(
-        '--max-retries',
-        help=('Maximum number of retries before a download error is reraised, '
-              "defaults to 100"),
+        "--max-retries",
+        help=(
+            "Maximum number of retries before a download error is reraised, "
+            "defaults to 100"
+        ),
         type=int,
         default=100,
         dest="max_retries",
@@ -239,18 +243,15 @@ def _make_arg_parser():
 def _init_logger(verbosity):
     # set up the logger
     global logger
-    logger = logging.getLogger('conda_mirror')
-    logmap = {0: logging.ERROR,
-              1: logging.WARNING,
-              2: logging.INFO,
-              3: logging.DEBUG}
-    loglevel = logmap.get(verbosity, '3')
+    logger = logging.getLogger("conda_mirror")
+    logmap = {0: logging.ERROR, 1: logging.WARNING, 2: logging.INFO, 3: logging.DEBUG}
+    loglevel = logmap.get(verbosity, "3")
 
     # clear all handlers
     for handler in logger.handlers:
         logger.removeHandler(handler)
 
     logger.setLevel(loglevel)
-    format_string = '%(levelname)s: %(message)s'
+    format_string = "%(levelname)s: %(message)s"
     formatter = logging.Formatter(fmt=format_string)
     stream_handler = logging.StreamHandler()
     stream_handler.setLevel(loglevel)
@@ -258,8 +259,9 @@ def _init_logger(verbosity):
 
     logger.addHandler(stream_handler)
 
-    print("Log level set to %s" % logging.getLevelName(logmap[verbosity]),
-          file=sys.stdout)
+    print(
+        "Log level set to %s" % logging.getLevelName(logmap[verbosity]), file=sys.stdout
+    )
 
 
 def _parse_and_format_args():
@@ -272,17 +274,18 @@ def _parse_and_format_args():
     given_args, _ = parser._parse_known_args(sys.argv[1:], argparse.Namespace())
 
     _init_logger(args.verbose)
-    logger.debug('sys.argv: %s', sys.argv)
+    logger.debug("sys.argv: %s", sys.argv)
 
     if args.version:
         from . import __version__
+
         print(__version__)
         sys.exit(1)
 
     config_dict = {}
     if args.config:
         logger.info("Loading config from %s", args.config)
-        with open(args.config, 'r') as f:
+        with open(args.config, "r") as f:
             config_dict = yaml.load(f)
         logger.info("config: %s", config_dict)
@@ -290,26 +293,29 @@ def _parse_and_format_args():
     for a in parser._actions:
         if (
             # value exists in config file
-            (a.dest in config_dict) and
+            (a.dest in config_dict)
+            and
             # ignore values that can only be given on command line
-            (a.dest not in {'config', 'verbose', 'version'}) and
+            (a.dest not in {"config", "verbose", "version"})
+            and
             # only use config file value if the value was not explicitly given on command line
             (a.dest not in given_args)
         ):
             logger.info("Using %s value from config file", a.dest)
             setattr(args, a.dest, config_dict.get(a.dest))
 
-    blacklist = config_dict.get('blacklist')
-    whitelist = config_dict.get('whitelist')
+    blacklist = config_dict.get("blacklist")
+    whitelist = config_dict.get("whitelist")
 
-    for required in ('target_directory', 'platform', 'upstream_channel'):
-        if (not getattr(args, required)):
+    for required in ("target_directory", "platform", "upstream_channel"):
+        if not getattr(args, required):
             raise ValueError("Missing command line argument: %s", required)
 
     if args.pdb:
         # set the pdb_hook as the except hook for all exceptions
         def pdb_hook(exctype, value, traceback):
             pdb.post_mortem(traceback)
+
         sys.excepthook = pdb_hook
 
     proxies = args.proxy
@@ -321,26 +327,26 @@ def pdb_hook(exctype, value, traceback):
         # -> {'http': 'https://user:pass@proxy.tld'}
         # "https://user:pass@proxy.tld"
         # -> {'https': 'https://user:pass@proxy.tld'}
-        scheme, *url = proxies.split(':')
+        scheme, *url = proxies.split(":")
         if len(url) > 1:
-            url = ':'.join(url)
+            url = ":".join(url)
         else:
-            url = '{}:{}'.format(scheme, url[0])
+            url = "{}:{}".format(scheme, url[0])
         proxies = {scheme: url}
 
     return {
-        'upstream_channel': args.upstream_channel,
-        'target_directory': args.target_directory,
-        'temp_directory': args.temp_directory,
-        'platform': args.platform,
-        'num_threads': args.num_threads,
-        'blacklist': blacklist,
-        'whitelist': whitelist,
-        'dry_run': args.dry_run,
-        'no_validate_target': args.no_validate_target,
-        'minimum_free_space': args.minimum_free_space,
-        'proxies': proxies,
-        'ssl_verify': args.ssl_verify,
-        'max_retries': args.max_retries,
+        "upstream_channel": args.upstream_channel,
+        "target_directory": args.target_directory,
+        "temp_directory": args.temp_directory,
+        "platform": args.platform,
+        "num_threads": args.num_threads,
+        "blacklist": blacklist,
+        "whitelist": whitelist,
+        "dry_run": args.dry_run,
+        "no_validate_target": args.no_validate_target,
+        "minimum_free_space": args.minimum_free_space,
+        "proxies": proxies,
+        "ssl_verify": args.ssl_verify,
+        "max_retries": args.max_retries,
     }
@@ -397,7 +403,7 @@ def _validate(filename, md5=None, size=None):
         The reason why the package is being removed
     """
     if md5:
-        calc = hashlib.md5(open(filename, 'rb').read()).hexdigest()
+        calc = hashlib.md5(open(filename, "rb").read()).hexdigest()
         if calc == md5:
             # If the MD5 matches, skip the other checks
             return filename, None
         else:
             return _remove_package(
                 filename,
                 reason="Failed md5 validation. Expected: %s. Computed: %s"
-                % (calc, md5))
+                % (calc, md5),
+            )
 
     if size and size != os.stat(filename).st_size:
         return _remove_package(filename, reason="Failed size test")
 
     try:
         with tarfile.open(filename) as t:
-            t.extractfile('info/index.json').read().decode('utf-8')
+            t.extractfile("info/index.json").read().decode("utf-8")
     except (tarfile.TarError, EOFError):
-        logger.info("Validation failed because conda package is corrupted.",
-                    exc_info=True)
+        logger.info(
+            "Validation failed because conda package is corrupted.", exc_info=True
+        )
         return _remove_package(filename, reason="Tarfile read failure")
 
     return filename, None
@@ -442,18 +450,19 @@ def get_repodata(channel, platform, proxies=None, ssl_verify=None):
         keyed on package name (e.g., twisted-16.0.0-py35_0.tar.bz2)
     """
     url_template, channel = _maybe_split_channel(channel)
-    url = url_template.format(channel=channel, platform=platform,
-                              file_name='repodata.json')
+    url = url_template.format(
+        channel=channel, platform=platform, file_name="repodata.json"
+    )
     resp = requests.get(url, proxies=proxies, verify=ssl_verify).json()
-    info = resp.get('info', {})
-    packages = resp.get('packages', {})
+    info = resp.get("info", {})
+    packages = resp.get("packages", {})
     # Patch the repodata.json so that all package info dicts contain a "subdir"
     # key. Apparently some channels on anaconda.org do not contain the
     # 'subdir' field. I think this this might be relegated to the
     # Continuum-provided channels only, actually.
     for pkg_name, pkg_info in packages.items():
-        pkg_info.setdefault('subdir', platform)
+        pkg_info.setdefault("subdir", platform)
     return info, packages
@@ -480,19 +489,20 @@ def _download(url, target_directory, proxies=None, ssl_verify=None):
     chunk_size = 1024  # 1KB chunks
     logger.info("download_url=%s", url)
     # create a temporary file
-    target_filename = url.split('/')[-1]
+    target_filename = url.split("/")[-1]
     download_filename = os.path.join(target_directory, target_filename)
-    logger.debug('downloading to %s', download_filename)
-    with open(download_filename, 'w+b') as tf:
-        ret = requests.get(url, stream=True,
-                           proxies=proxies, verify=ssl_verify)
+    logger.debug("downloading to %s", download_filename)
+    with open(download_filename, "w+b") as tf:
+        ret = requests.get(url, stream=True, proxies=proxies, verify=ssl_verify)
         for data in ret.iter_content(chunk_size):
             tf.write(data)
     file_size = os.path.getsize(download_filename)
     return file_size
 
 
-def _download_backoff_retry(url, target_directory, proxies=None, ssl_verify=None, max_retries=100):
+def _download_backoff_retry(
+    url, target_directory, proxies=None, ssl_verify=None, max_retries=100
+):
     """Download `url` to `target_directory` with exponential backoff in the
     event of failure.
@@ -522,11 +532,15 @@ def _download_backoff_retry(url, target_directory, proxies=None, ssl_verify=Non
         c += 1
         two_c *= 2
         try:
-            rtn = _download(url, target_directory, proxies=proxies, ssl_verify=ssl_verify)
+            rtn = _download(
+                url, target_directory, proxies=proxies, ssl_verify=ssl_verify
+            )
             break
         except Exception:
             if c < max_retries:
-                logger.debug('downloading failed, retrying {0}/{1}'.format(c, max_retries))
+                logger.debug(
+                    "downloading failed, retrying {0}/{1}".format(c, max_retries)
+                )
                 time.sleep(delay * random.randint(0, two_c - 1))
             else:
                 raise
@@ -585,24 +599,26 @@ def _validate_packages(package_repodata, package_directory, num_threads=1):
     # create argument list (necessary because multiprocessing.Pool.map does not
     # accept additional args to be passed to the mapped function)
     num_packages = len(local_packages)
-    val_func_arg_list = [(package, num, num_packages, package_repodata,
-                          package_directory)
-                         for num, package in enumerate(sorted(local_packages))]
+    val_func_arg_list = [
+        (package, num, num_packages, package_repodata, package_directory)
+        for num, package in enumerate(sorted(local_packages))
+    ]
 
     if num_threads == 1 or num_threads is None:
         # Do serial package validation (Takes a long time for large repos)
-        validation_results = map(_validate_or_remove_package,
-                                 val_func_arg_list)
+        validation_results = map(_validate_or_remove_package, val_func_arg_list)
     else:
         if num_threads == 0:
             num_threads = os.cpu_count()
-            logger.debug('num_threads=0 so it will be replaced by all available '
-                         'cores: %s' % num_threads)
-        logger.info('Will use {} threads for package validation.'
-                    ''.format(num_threads))
+            logger.debug(
+                "num_threads=0 so it will be replaced by all available "
+                "cores: %s" % num_threads
+            )
+        logger.info(
+            "Will use {} threads for package validation." "".format(num_threads)
+        )
         p = multiprocessing.Pool(num_threads)
-        validation_results = p.map(_validate_or_remove_package,
-                                   val_func_arg_list)
+        validation_results = p.map(_validate_or_remove_package, val_func_arg_list)
         p.close()
         p.join()
@@ -640,25 +656,34 @@ def _validate_or_remove_package(args):
     try:
         package_metadata = package_repodata[package]
     except KeyError:
-        logger.warning("%s is not in the upstream index. Removing...",
-                       package)
+        logger.warning("%s is not in the upstream index. Removing...", package)
Removing...", package) reason = "Package is not in the repodata index" package_path = os.path.join(package_directory, package) return _remove_package(package_path, reason=reason) # validate the integrity of the package, the size of the package and # its hashes - logger.info('Validating {:4d} of {:4d}: {}.'.format(num + 1, num_packages, - package)) + logger.info("Validating {:4d} of {:4d}: {}.".format(num + 1, num_packages, package)) package_path = os.path.join(package_directory, package) - return _validate(package_path, - md5=package_metadata.get('md5'), - size=package_metadata.get('size')) + return _validate( + package_path, md5=package_metadata.get("md5"), size=package_metadata.get("size") + ) -def main(upstream_channel, target_directory, temp_directory, platform, - blacklist=None, whitelist=None, num_threads=1, dry_run=False, - no_validate_target=False, minimum_free_space=0, proxies=None, - ssl_verify=None, max_retries=100): +def main( + upstream_channel, + target_directory, + temp_directory, + platform, + blacklist=None, + whitelist=None, + num_threads=1, + dry_run=False, + no_validate_target=False, + minimum_free_space=0, + proxies=None, + ssl_verify=None, + max_retries=100, +): """ Parameters @@ -757,18 +782,19 @@ def main(upstream_channel, target_directory, temp_directory, platform, # 8. download repodata.json and repodata.json.bz2 # 9. copy new repodata.json and repodata.json.bz2 into the repo summary = { - 'validating-existing': set(), - 'validating-new': set(), - 'downloaded': set(), - 'blacklisted': set(), - 'to-mirror': set() + "validating-existing": set(), + "validating-new": set(), + "downloaded": set(), + "blacklisted": set(), + "to-mirror": set(), } # Implementation: if not os.path.exists(os.path.join(target_directory, platform)): os.makedirs(os.path.join(target_directory, platform)) - info, packages = get_repodata(upstream_channel, platform, - proxies=proxies, ssl_verify=ssl_verify) + info, packages = get_repodata( + upstream_channel, platform, proxies=proxies, ssl_verify=ssl_verify + ) local_directory = os.path.join(target_directory, platform) # 1. 
@@ -784,7 +810,7 @@ def main(upstream_channel, target_directory, temp_directory, platform,
     if blacklist:
         blacklist_packages = {}
         for blist in blacklist:
-            logger.debug('blacklist item: %s', blist)
+            logger.debug("blacklist item: %s", blist)
             matched_packages = _match(packages, blist)
             logger.debug(pformat(list(matched_packages.keys())))
             blacklist_packages.update(matched_packages)
@@ -797,9 +823,8 @@ def main(upstream_channel, target_directory, temp_directory, platform,
             matched_packages = _match(packages, wlist)
             whitelist_packages.update(matched_packages)
     # make final mirror list of not-blacklist + whitelist
-    true_blacklist = set(blacklist_packages.keys()) - set(
-        whitelist_packages.keys())
-    summary['blacklisted'].update(true_blacklist)
+    true_blacklist = set(blacklist_packages.keys()) - set(whitelist_packages.keys())
+    summary["blacklisted"].update(true_blacklist)
 
     logger.info("BLACKLISTED PACKAGES")
     logger.info(pformat(true_blacklist))
@@ -808,7 +833,9 @@ def main(upstream_channel, target_directory, temp_directory, platform,
     if dry_run:
         local_packages = _list_conda_packages(local_directory)
         packages_slated_for_removal = [
-            pkg_name for pkg_name in local_packages if pkg_name in summary['blacklisted']
+            pkg_name
+            for pkg_name in local_packages
+            if pkg_name in summary["blacklisted"]
         ]
         logger.info("PACKAGES TO BE REMOVED")
         logger.info(pformat(packages_slated_for_removal))
@@ -817,20 +844,23 @@ def main(upstream_channel, target_directory, temp_directory, platform,
 
     # 4. Validate all local packages
     # construct the desired package repodata
-    desired_repodata = {pkgname: packages[pkgname]
-                        for pkgname in possible_packages_to_mirror}
+    desired_repodata = {
+        pkgname: packages[pkgname] for pkgname in possible_packages_to_mirror
+    }
     if not (dry_run or no_validate_target):
         # Only validate if we're not doing a dry-run
-        validation_results = _validate_packages(desired_repodata, local_directory, num_threads)
-        summary['validating-existing'].update(validation_results)
+        validation_results = _validate_packages(
+            desired_repodata, local_directory, num_threads
+        )
+        summary["validating-existing"].update(validation_results)
 
     # 5. figure out final list of packages to mirror
     # do the set difference of what is local and what is in the final
     # mirror list
     local_packages = _list_conda_packages(local_directory)
     to_mirror = possible_packages_to_mirror - set(local_packages)
-    logger.info('PACKAGES TO MIRROR')
+    logger.info("PACKAGES TO MIRROR")
     logger.info(pformat(sorted(to_mirror)))
-    summary['to-mirror'].update(to_mirror)
+    summary["to-mirror"].update(to_mirror)
 
     if dry_run:
         logger.info("Dry run complete. Exiting")
         return summary
@@ -841,60 +871,72 @@ def main(upstream_channel, target_directory, temp_directory, platform,
     #    c. move to local repo
     # mirror all new packages
     total_bytes = 0
-    minimum_free_space_kb = (minimum_free_space * 1024 * 1024)
+    minimum_free_space_kb = minimum_free_space * 1024 * 1024
     download_url, channel = _maybe_split_channel(upstream_channel)
     with tempfile.TemporaryDirectory(dir=temp_directory) as download_dir:
-        logger.info('downloading to the tempdir %s', download_dir)
+        logger.info("downloading to the tempdir %s", download_dir)
         for package_name in sorted(to_mirror):
             url = download_url.format(
-                channel=channel,
-                platform=platform,
-                file_name=package_name)
+                channel=channel, platform=platform, file_name=package_name
+            )
             try:
                 # make sure we have enough free disk space in the temp folder to meet threshold
                 if shutil.disk_usage(download_dir).free < minimum_free_space_kb:
-                    logger.error('Disk space below threshold in %s. Aborting download.',
-                                 download_dir)
+                    logger.error(
+                        "Disk space below threshold in %s. Aborting download.",
+                        download_dir,
+                    )
                     break
 
                 # download package
-                total_bytes += _download_backoff_retry(url, download_dir,
-                                                       proxies=proxies,
-                                                       ssl_verify=ssl_verify,
-                                                       max_retries=max_retries)
+                total_bytes += _download_backoff_retry(
+                    url,
+                    download_dir,
+                    proxies=proxies,
+                    ssl_verify=ssl_verify,
+                    max_retries=max_retries,
+                )
 
                 # make sure we have enough free disk space in the target folder to meet threshold
                 # while also being able to fit the packages we have already downloaded
-                if (shutil.disk_usage(local_directory).free - total_bytes) < minimum_free_space_kb:
-                    logger.error('Disk space below threshold in %s. Aborting download',
-                                 local_directory)
+                if (
+                    shutil.disk_usage(local_directory).free - total_bytes
+                ) < minimum_free_space_kb:
+                    logger.error(
+                        "Disk space below threshold in %s. Aborting download",
+                        local_directory,
+                    )
                     break
 
-                summary['downloaded'].add((url, download_dir))
+                summary["downloaded"].add((url, download_dir))
             except Exception as ex:
-                logger.exception('Unexpected error: %s. Aborting download.', ex)
+                logger.exception("Unexpected error: %s. Aborting download.", ex)
                 break
 
         # validate all packages in the download directory
-        validation_results = _validate_packages(packages, download_dir,
-                                                num_threads=num_threads)
-        summary['validating-new'].update(validation_results)
-        logger.debug('Newly downloaded files at %s are %s',
-                     download_dir,
-                     pformat(os.listdir(download_dir)))
+        validation_results = _validate_packages(
+            packages, download_dir, num_threads=num_threads
+        )
+        summary["validating-new"].update(validation_results)
+        logger.debug(
+            "Newly downloaded files at %s are %s",
+            download_dir,
+            pformat(os.listdir(download_dir)),
+        )
 
         # 8. Use already downloaded repodata.json contents but prune it of
        # packages we don't want
-        repodata = {'info': info, 'packages': packages}
+        repodata = {"info": info, "packages": packages}
         # compute the packages that we have locally
-        packages_we_have = set(local_packages +
-                               _list_conda_packages(download_dir))
+        packages_we_have = set(local_packages + _list_conda_packages(download_dir))
         # remake the packages dictionary with only the packages we have
         # locally
-        repodata['packages'] = {
-            name: info for name, info in repodata['packages'].items()
-            if name in packages_we_have}
+        repodata["packages"] = {
+            name: info
+            for name, info in repodata["packages"].items()
+            if name in packages_we_have
+        }
         _write_repodata(download_dir, repodata)
 
         # move new conda packages
@@ -904,16 +946,16 @@ def main(upstream_channel, target_directory, temp_directory, platform,
             logger.info("moving %s to %s", old_path, new_path)
             shutil.move(old_path, new_path)
 
-        for f in ('repodata.json', 'repodata.json.bz2'):
+        for f in ("repodata.json", "repodata.json.bz2"):
             download_path = os.path.join(download_dir, f)
             move_path = os.path.join(local_directory, f)
             shutil.move(download_path, move_path)
 
     # Also need to make a "noarch" channel or conda gets mad
-    noarch_path = os.path.join(target_directory, 'noarch')
+    noarch_path = os.path.join(target_directory, "noarch")
     if not os.path.exists(noarch_path):
         os.makedirs(noarch_path, exist_ok=True)
-        noarch_repodata = {'info': {}, 'packages': {}}
+        noarch_repodata = {"info": {}, "packages": {}}
         _write_repodata(noarch_path, noarch_repodata)
 
     return summary
@@ -922,20 +964,19 @@ def main(upstream_channel, target_directory, temp_directory, platform,
 def _write_repodata(package_dir, repodata_dict):
     data = json.dumps(repodata_dict, indent=2, sort_keys=True)
     # strip trailing whitespace
-    data = '\n'.join(line.rstrip() for line in data.splitlines())
+    data = "\n".join(line.rstrip() for line in data.splitlines())
     # make sure we have newline at the end
-    if not data.endswith('\n'):
-        data += '\n'
+    if not data.endswith("\n"):
+        data += "\n"
 
-    with open(os.path.join(package_dir,
-                           'repodata.json'), 'w') as fo:
+    with open(os.path.join(package_dir, "repodata.json"), "w") as fo:
         fo.write(data)
 
     # compress repodata.json into the bz2 format. some conda commands still
     # need it
-    bz2_path = os.path.join(package_dir, 'repodata.json.bz2')
-    with open(bz2_path, 'wb') as fo:
-        fo.write(bz2.compress(data.encode('utf-8')))
+    bz2_path = os.path.join(package_dir, "repodata.json.bz2")
+    with open(bz2_path, "wb") as fo:
+        fo.write(bz2.compress(data.encode("utf-8")))
 
 
 if __name__ == "__main__":
diff --git a/conda_mirror/diff_tar.py b/conda_mirror/diff_tar.py
index c7cab98..b2a30b3 100644
--- a/conda_mirror/diff_tar.py
+++ b/conda_mirror/diff_tar.py
@@ -12,7 +12,7 @@ from os.path import abspath, isdir, join, relpath
 
-REFERENCE_PATH = './reference.json'
+REFERENCE_PATH = "./reference.json"
 
 
 class NoReferenceError(FileNotFoundError):
@@ -24,8 +24,8 @@ def md5_file(path):
     Return the MD5 hashsum of the file given by `path` in hexadecimal
     representation.
     """
-    h = hashlib.new('md5')
-    with open(path, 'rb') as fi:
+    h = hashlib.new("md5")
+    with open(path, "rb") as fi:
         while 1:
             chunk = fi.read(262144)
             if not chunk:
@@ -40,7 +40,7 @@ def find_repos(mirror_dir):
     which contain a repodata.json and repodata.json.bz2 file.
""" for root, unused_dirs, files in os.walk(mirror_dir): - if 'repodata.json' in files and 'repodata.json.bz2' in files: + if "repodata.json" in files and "repodata.json.bz2" in files: yield root @@ -52,8 +52,8 @@ def all_repodata(mirror_dir): """ d = {} for repo_path in find_repos(mirror_dir): - with open(join(repo_path, 'repodata.json')) as fi: - index = json.load(fi)['packages'] + with open(join(repo_path, "repodata.json")) as fi: + index = json.load(fi)["packages"] d[repo_path] = index return d @@ -67,9 +67,9 @@ def verify_all_repos(mirror_dir): for repo_path, index in d.items(): for fn, info in index.items(): path = join(repo_path, fn) - if info['md5'] == md5_file(path): + if info["md5"] == md5_file(path): continue - print('MD5 mismatch: %s' % path) + print("MD5 mismatch: %s" % path) def write_reference(mirror_dir): @@ -79,9 +79,9 @@ def write_reference(mirror_dir): """ data = json.dumps(all_repodata(mirror_dir), indent=2, sort_keys=True) # make sure we have newline at the end - if not data.endswith('\n'): - data += '\n' - with open(REFERENCE_PATH, 'w') as fo: + if not data.endswith("\n"): + data += "\n" + with open(REFERENCE_PATH, "w") as fo: fo.write(data) @@ -108,22 +108,22 @@ def get_updates(mirror_dir): for repo_path, index2 in d2.items(): index1 = d1.get(repo_path, {}) if index1 != index2: - for fn in 'repodata.json', 'repodata.json.bz2': + for fn in "repodata.json", "repodata.json.bz2": yield relpath(join(repo_path, fn), mirror_dir) for fn, info2 in index2.items(): info1 = index1.get(fn, {}) - if info1.get('md5') != info2['md5']: + if info1.get("md5") != info2["md5"]: yield relpath(join(repo_path, fn), mirror_dir) -def tar_repo(mirror_dir, outfile='update.tar', verbose=False): +def tar_repo(mirror_dir, outfile="update.tar", verbose=False): """ Write the so-called differential tarball, see get_updates(). 
""" - t = tarfile.open(outfile, 'w') + t = tarfile.open(outfile, "w") for f in get_updates(mirror_dir): if verbose: - print('adding: %s' % f) + print("adding: %s" % f) t.add(join(mirror_dir, f), f) t.close() if verbose: @@ -134,48 +134,49 @@ def main(): import argparse p = argparse.ArgumentParser( - description='create "differential" tarballs of a conda repository') + description='create "differential" tarballs of a conda repository' + ) - p.add_argument('repo_dir', - nargs='?', - action="store", - metavar='REPOSITORY', - help="path to repository directory") + p.add_argument( + "repo_dir", + nargs="?", + action="store", + metavar="REPOSITORY", + help="path to repository directory", + ) - p.add_argument('--create', - action="store_true", - help="create differential tarball") + p.add_argument("--create", action="store_true", help="create differential tarball") - p.add_argument('--reference', - action="store_true", - help="create a reference point file") + p.add_argument( + "--reference", action="store_true", help="create a reference point file" + ) - p.add_argument('--show', - action="store_true", - help="show the files in respect to the latest reference " - "point file (which would be included in the " - "differential tarball)") + p.add_argument( + "--show", + action="store_true", + help="show the files in respect to the latest reference " + "point file (which would be included in the " + "differential tarball)", + ) - p.add_argument('--verify', - action="store_true", - help="verify the mirror repository and exit") + p.add_argument( + "--verify", action="store_true", help="verify the mirror repository and exit" + ) - p.add_argument('-v', '--verbose', - action="store_true") + p.add_argument("-v", "--verbose", action="store_true") - p.add_argument('--version', - action="store_true", - help="print version and exit") + p.add_argument("--version", action="store_true", help="print version and exit") args = p.parse_args() if args.version: from conda_mirror import __version__ - print('conda-mirror: %s' % __version__) + + print("conda-mirror: %s" % __version__) return if not args.repo_dir: - p.error('exactly one REPOSITORY is required, try -h') + p.error("exactly one REPOSITORY is required, try -h") mirror_dir = abspath(args.repo_dir) if not isdir(mirror_dir): @@ -199,11 +200,14 @@ def main(): print("Nothing done.") except NoReferenceError: - sys.exit("""\ + sys.exit( + """\ Error: no such file: %s Please use the --reference option before creating a differential tarball.\ -""" % REFERENCE_PATH) +""" + % REFERENCE_PATH + ) -if __name__ == '__main__': +if __name__ == "__main__": main() diff --git a/pyproject.toml b/pyproject.toml new file mode 100644 index 0000000..993698f --- /dev/null +++ b/pyproject.toml @@ -0,0 +1,3 @@ +[tool.black] +line-length = 88 +include = '\.pyi?$' \ No newline at end of file diff --git a/requirements-test.txt b/requirements-test.txt index b517308..6fe5f49 100644 --- a/requirements-test.txt +++ b/requirements-test.txt @@ -1,3 +1,4 @@ coverage pytest -flake8 \ No newline at end of file +flake8 +black diff --git a/run_tests.py b/run_tests.py index 2863e4e..7633fb7 100755 --- a/run_tests.py +++ b/run_tests.py @@ -2,12 +2,12 @@ import sys import pytest -if __name__ == '__main__': +if __name__ == "__main__": # show output results from every test function - args = ['-v'] + args = ["-v"] # show the message output for skipped and expected failure tests - args.append('-rxs') - print('sys.argv={}'.format(sys.argv)) + args.append("-rxs") + print("sys.argv={}".format(sys.argv)) 
     args.extend(sys.argv[1:])
     # call pytest and exit with the return code from pytest so that
     # travis will fail correctly if tests fail
diff --git a/setup.py b/setup.py
index b2423c1..79b320a 100644
--- a/setup.py
+++ b/setup.py
@@ -2,33 +2,30 @@
 import versioneer
 
 try:
-    with open('README.md') as f:
+    with open("README.md") as f:
         long_description = f.read()
 except Exception:
-    long_description = ''
-    print('Failed to load README.md as long_description')
+    long_description = ""
+    print("Failed to load README.md as long_description")
 
 
 setup(
-    name='conda_mirror',
+    name="conda_mirror",
     version=versioneer.get_version(),
     cmdclass=versioneer.get_cmdclass(),
     author="Eric Dill",
     packages=find_packages(),
-    author_email='eric.dill@maxpoint.com',
-    description='Mirror an upstream conda channel to a local directory',
+    author_email="eric.dill@maxpoint.com",
+    description="Mirror an upstream conda channel to a local directory",
     long_description=long_description,
     long_description_content_type="text/markdown",
-    url='https://github.com/regro/conda-mirror',
-    platforms=['Linux', 'Mac OSX', 'Windows'],
-    license='BSD 3-Clause',
-    install_requires=[
-        'requests',
-        'pyyaml',
-    ],
+    url="https://github.com/regro/conda-mirror",
+    platforms=["Linux", "Mac OSX", "Windows"],
+    license="BSD 3-Clause",
+    install_requires=["requests", "pyyaml"],
     entry_points={
         "console_scripts": [
-            'conda-mirror = conda_mirror.conda_mirror:cli',
-            'conda-diff-tar = conda_mirror.diff_tar:main',
+            "conda-mirror = conda_mirror.conda_mirror:cli",
+            "conda-diff-tar = conda_mirror.diff_tar:main",
         ]
-    }
+    },
 )
diff --git a/test/conftest.py b/test/conftest.py
index 836c430..7d353cf 100644
--- a/test/conftest.py
+++ b/test/conftest.py
@@ -2,4 +2,4 @@ import logging
 
 from conda_mirror import conda_mirror
 
-conda_mirror.logger = logging.getLogger('conda_mirror-test')
+conda_mirror.logger = logging.getLogger("conda_mirror-test")
diff --git a/test/test_conda_mirror.py b/test/test_conda_mirror.py
index c9282a7..4ffa590 100644
--- a/test/test_conda_mirror.py
+++ b/test/test_conda_mirror.py
@@ -12,124 +12,129 @@
 import pytest
 
-anaconda_channel = 'https://repo.continuum.io/pkgs/free'
+anaconda_channel = "https://repo.continuum.io/pkgs/free"
 
 
-@pytest.fixture(scope='module')
+@pytest.fixture(scope="module")
 def repodata():
     repodata = {}
-    repodata['conda-forge'] = conda_mirror.get_repodata('conda-forge',
-                                                        'linux-64')
-    repodata[anaconda_channel] = conda_mirror.get_repodata(anaconda_channel,
-                                                           'linux-64')
+    repodata["conda-forge"] = conda_mirror.get_repodata("conda-forge", "linux-64")
+    repodata[anaconda_channel] = conda_mirror.get_repodata(anaconda_channel, "linux-64")
     return repodata
 
 
 def test_match(repodata):
     repodata_info, repodata_packages = repodata[anaconda_channel]
-    matched = conda_mirror._match(repodata_packages, {'name': 'jupyter'})
-    assert set([v['name'] for v in matched.values()]) == set(['jupyter'])
+    matched = conda_mirror._match(repodata_packages, {"name": "jupyter"})
+    assert set([v["name"] for v in matched.values()]) == set(["jupyter"])
 
-    matched = conda_mirror._match(repodata_packages, {'name': "*"})
+    matched = conda_mirror._match(repodata_packages, {"name": "*"})
     assert len(matched) == len(repodata_packages)
 
 
 def test_version():
     old_args = copy.copy(sys.argv)
-    sys.argv = ['conda-mirror', '--version']
+    sys.argv = ["conda-mirror", "--version"]
     with pytest.raises(SystemExit):
         conda_mirror.cli()
     sys.argv = old_args
 
 
 def _get_smallest_packages(packages, num=1):
-    return sorted(packages, key=lambda x: packages[x]['size'])[:num]
+    return sorted(packages, key=lambda x: packages[x]["size"])[:num]
 
 
 @pytest.mark.parametrize(
-    'channel,platform',
-    itertools.product([anaconda_channel, 'conda-forge'], ['linux-64']))
-@pytest.mark.parametrize('num_threads', [0, 1, 4])
+    "channel,platform",
+    itertools.product([anaconda_channel, "conda-forge"], ["linux-64"]),
+)
+@pytest.mark.parametrize("num_threads", [0, 1, 4])
 def test_cli(tmpdir, channel, platform, repodata, num_threads):
     info, packages = repodata[channel]
-    smallest_package, = _get_smallest_packages(packages)
+    (smallest_package,) = _get_smallest_packages(packages)
     # drop the html stuff. get just the channel
-    f2 = tmpdir.mkdir(channel.rsplit('/', 1)[-1])
+    f2 = tmpdir.mkdir(channel.rsplit("/", 1)[-1])
     f2.mkdir(platform)
-    f1 = tmpdir.mkdir('conf').join('conf.yaml')
+    f1 = tmpdir.mkdir("conf").join("conf.yaml")
 
-    f1.write('''
+    f1.write(
+        """
 blacklist:
     - name: "*"
 whitelist:
     - name: {}
-      version: {}'''.format(packages[smallest_package]['name'],
-                            packages[smallest_package]['version']))
-    cli_args = ("conda-mirror"
-                " --config {config}"
-                " --upstream-channel {channel}"
-                " --target-directory {target_directory}"
-                " --platform {platform}"
-                " --num-threads {num_threads}"
-                " --pdb"
-                " -vvv"
-                ).format(config=f1.strpath,
-                         channel=channel,
-                         target_directory=f2.strpath,
-                         platform=platform,
-                         num_threads=num_threads)
+      version: {}""".format(
+            packages[smallest_package]["name"], packages[smallest_package]["version"]
+        )
+    )
+    cli_args = (
+        "conda-mirror"
+        " --config {config}"
+        " --upstream-channel {channel}"
+        " --target-directory {target_directory}"
+        " --platform {platform}"
+        " --num-threads {num_threads}"
+        " --pdb"
+        " -vvv"
+    ).format(
+        config=f1.strpath,
+        channel=channel,
+        target_directory=f2.strpath,
+        platform=platform,
+        num_threads=num_threads,
+    )
     old_argv = copy.deepcopy(sys.argv)
-    sys.argv = cli_args.split(' ')
+    sys.argv = cli_args.split(" ")
 
     # Write a package that does not exist in the upstream repodata into the
     # mirror path to make sure we exercise a broken code path
     # https://github.com/maxpoint/conda-mirror/issues/29
-    _write_bad_package(channel_dir=f2.strpath, platform_name=platform,
-                       pkg_name='bad-1-0.tar.bz2')
+    _write_bad_package(
+        channel_dir=f2.strpath, platform_name=platform, pkg_name="bad-1-0.tar.bz2"
+    )
 
     # Write a bad package that does exist in the upstream repodata into the
     # mirror path to make sure we can handle that case too
     info, packages = repodata[channel]
     upstream_pkg_name = next(iter(packages.keys()))
-    _write_bad_package(channel_dir=f2.strpath, platform_name=platform,
-                       pkg_name=upstream_pkg_name)
+    _write_bad_package(
+        channel_dir=f2.strpath, platform_name=platform, pkg_name=upstream_pkg_name
+    )
 
     conda_mirror.cli()
     sys.argv = old_argv
 
-    for f in ['repodata.json', 'repodata.json.bz2']:
+    for f in ["repodata.json", "repodata.json.bz2"]:
         # make sure the repodata file exists
         assert f in os.listdir(os.path.join(f2.strpath, platform))
 
     # make sure that the repodata contains less than upstream since we prune it
-    with open(os.path.join(f2.strpath, platform, 'repodata.json'), 'r') as f:
+    with open(os.path.join(f2.strpath, platform, "repodata.json"), "r") as f:
         disk_repodata = json.load(f)
-    disk_info = disk_repodata.get('info', {})
+    disk_info = disk_repodata.get("info", {})
     assert len(disk_info) == len(info)
-    disk_packages = disk_repodata.get('packages', {})
+    disk_packages = disk_repodata.get("packages", {})
     assert len(disk_packages) < len(packages)
-    with bz2.BZ2File(os.path.join(f2.strpath,
-                                  platform,
-                                  'repodata.json.bz2'), 'r') as f:
+    with bz2.BZ2File(os.path.join(f2.strpath, platform, "repodata.json.bz2"), "r") as f:
         contents = f.read().decode()
         rd = json.loads(contents)
-        assert len(rd['info']) == len(disk_info)
-        assert len(rd['packages']) == len(disk_packages)
+        assert len(rd["info"]) == len(disk_info)
+        assert len(rd["packages"]) == len(disk_packages)
 
 
 def _write_bad_package(channel_dir, platform_name, pkg_name):
     target_dir = os.path.join(channel_dir, platform_name)
     if not os.path.exists(target_dir):
         os.makedirs(target_dir)
-    with bz2.BZ2File(os.path.join(target_dir, pkg_name), 'wb') as f:
+    with bz2.BZ2File(os.path.join(target_dir, pkg_name), "wb") as f:
         f.write("This is a fake package".encode())
 
 
 def test_main(tmpdir, repodata):
-    platform = 'linux-64'
-    channel = 'conda-forge'
+    platform = "linux-64"
+    channel = "conda-forge"
     target_directory = tmpdir.mkdir(platform)
-    temp_directory = tmpdir.mkdir(join(platform, 'temp'))
+    temp_directory = tmpdir.mkdir(join(platform, "temp"))
     info, packages = repodata[channel]
     smallest_package, next_smallest_package = _get_smallest_packages(packages, num=2)
@@ -138,40 +143,54 @@ def test_main(tmpdir, repodata):
         target_directory=target_directory.strpath,
         temp_directory=temp_directory.strpath,
         platform=platform,
-        blacklist=[{'name': '*'}],
-        whitelist=[{'name': packages[smallest_package]['name'],
-                    'version': packages[smallest_package]['version']}])
+        blacklist=[{"name": "*"}],
+        whitelist=[
+            {
+                "name": packages[smallest_package]["name"],
+                "version": packages[smallest_package]["version"],
+            }
+        ],
+    )
 
-    assert len(ret['validating-existing']) == 0, "There should be no already-downloaded packages"
-    validated_all_downloads = len(ret['downloaded']) == len(ret['validating-new'])
+    assert (
+        len(ret["validating-existing"]) == 0
+    ), "There should be no already-downloaded packages"
+    validated_all_downloads = len(ret["downloaded"]) == len(ret["validating-new"])
     assert validated_all_downloads, "We should have downloaded at least one package"
 
-    previously_downloaded_packages = len(ret['downloaded'])
+    previously_downloaded_packages = len(ret["downloaded"])
     ret = conda_mirror.main(
         upstream_channel=channel,
         target_directory=target_directory.strpath,
         temp_directory=temp_directory.strpath,
         platform=platform,
-        blacklist=[{'name': '*'}],
-        whitelist=[{'name': packages[next_smallest_package]['name'],
-                    'version': packages[next_smallest_package]['version']}])
+        blacklist=[{"name": "*"}],
+        whitelist=[
+            {
+                "name": packages[next_smallest_package]["name"],
+                "version": packages[next_smallest_package]["version"],
+            }
+        ],
+    )
 
     msg = "We should have %s packages downloaded now" % previously_downloaded_packages
-    assert len(ret['validating-existing']) == previously_downloaded_packages, msg
-    validated_all_downloads = len(ret['downloaded']) == len(ret['validating-new'])
+    assert len(ret["validating-existing"]) == previously_downloaded_packages, msg
+    validated_all_downloads = len(ret["downloaded"]) == len(ret["validating-new"])
     assert validated_all_downloads, "We should have downloaded at least one package"
 
 
 def test_dry_run_dumb(tmpdir):
-    platform = 'linux-64'
-    channel = 'conda-forge'
+    platform = "linux-64"
+    channel = "conda-forge"
     target_directory = tmpdir.mkdir(platform)
-    temp_directory = tmpdir.mkdir(join(platform, 'temp'))
+    temp_directory = tmpdir.mkdir(join(platform, "temp"))
 
     ret = conda_mirror.main(
         upstream_channel=channel,
         platform=platform,
         target_directory=target_directory.strpath,
         temp_directory=temp_directory.strpath,
-        dry_run=True
+        dry_run=True,
     )
-    assert len(ret['to-mirror']) > 1, "We should have a great deal of packages slated to download"
+    assert (
+        len(ret["to-mirror"]) > 1
+    ), "We should have a great deal of packages slated to download"
diff --git a/test/test_diff_tar.py b/test/test_diff_tar.py
index 6d37386..ea64839 100644
--- a/test/test_diff_tar.py
+++ b/test/test_diff_tar.py
@@ -10,47 +10,44 @@
 import conda_mirror.diff_tar as dt
 
-EMPTY_MD5 = 'd41d8cd98f00b204e9800998ecf8427e'
+EMPTY_MD5 = "d41d8cd98f00b204e9800998ecf8427e"
 
 
 @pytest.fixture
 def tmpdir():
     tmpdir = tempfile.mkdtemp()
-    dt.mirror_dir = join(tmpdir, 'repo')
-    dt.REFERENCE_PATH = join(tmpdir, 'reference.json')
+    dt.mirror_dir = join(tmpdir, "repo")
+    dt.REFERENCE_PATH = join(tmpdir, "reference.json")
     yield tmpdir
     shutil.rmtree(tmpdir)
 
 
 def test_md5_file(tmpdir):
-    tmpfile = join(tmpdir, 'testfile')
-    with open(tmpfile, 'wb') as fo:
-        fo.write(b'A\n')
-    assert dt.md5_file(tmpfile) == 'bf072e9119077b4e76437a93986787ef'
+    tmpfile = join(tmpdir, "testfile")
+    with open(tmpfile, "wb") as fo:
+        fo.write(b"A\n")
+    assert dt.md5_file(tmpfile) == "bf072e9119077b4e76437a93986787ef"
 
 
-def create_test_repo(subdirname='linux-64'):
+def create_test_repo(subdirname="linux-64"):
     subdir = join(dt.mirror_dir, subdirname)
     os.makedirs(subdir)
-    with open(join(subdir, 'repodata.json'), 'w') as fo:
-        fo.write(json.dumps({'packages':
-                             {'a-1.0-0.tar.bz2': {'md5': EMPTY_MD5}}}))
-    for fn in 'repodata.json.bz2', 'a-1.0-0.tar.bz2':
-        with open(join(subdir, fn), 'wb') as fo:
+    with open(join(subdir, "repodata.json"), "w") as fo:
+        fo.write(json.dumps({"packages": {"a-1.0-0.tar.bz2": {"md5": EMPTY_MD5}}}))
+    for fn in "repodata.json.bz2", "a-1.0-0.tar.bz2":
+        with open(join(subdir, fn), "wb") as fo:
             pass
 
 
 def test_find_repos(tmpdir):
     create_test_repo()
-    assert list(dt.find_repos(dt.mirror_dir)) == \
-        [join(dt.mirror_dir, 'linux-64')]
+    assert list(dt.find_repos(dt.mirror_dir)) == [join(dt.mirror_dir, "linux-64")]
 
 
 def test_all_repodata_repos(tmpdir):
     create_test_repo()
     d = dt.all_repodata(dt.mirror_dir)
-    assert d[join(dt.mirror_dir, 'linux-64')]['a-1.0-0.tar.bz2']['md5'] == \
-        EMPTY_MD5
+    assert d[join(dt.mirror_dir, "linux-64")]["a-1.0-0.tar.bz2"]["md5"] == EMPTY_MD5
 
 
 def test_verify_all_repos(tmpdir):
@@ -66,50 +63,51 @@ def test_read_no_reference(tmpdir):
 
 def test_write_and_read_reference(tmpdir):
     create_test_repo()
-    dt.write_reference(join(tmpdir, 'repo'))
+    dt.write_reference(join(tmpdir, "repo"))
     ref = dt.read_reference()
-    assert ref[join(dt.mirror_dir, 'linux-64')]['a-1.0-0.tar.bz2']['md5'] == \
-        EMPTY_MD5
+    assert ref[join(dt.mirror_dir, "linux-64")]["a-1.0-0.tar.bz2"]["md5"] == EMPTY_MD5
 
 
 def test_get_updates(tmpdir):
     create_test_repo()
-    dt.write_reference(join(tmpdir, 'repo'))
+    dt.write_reference(join(tmpdir, "repo"))
     assert list(dt.get_updates(dt.mirror_dir)) == []
-    create_test_repo('win-32')
+    create_test_repo("win-32")
     lst = sorted(dt.get_updates(dt.mirror_dir))
-    assert lst == ['win-32/a-1.0-0.tar.bz2',
-                   'win-32/repodata.json',
-                   'win-32/repodata.json.bz2']
+    assert lst == [
+        "win-32/a-1.0-0.tar.bz2",
+        "win-32/repodata.json",
+        "win-32/repodata.json.bz2",
+    ]
 
 
 def test_tar_repo(tmpdir):
     create_test_repo()
-    tarball = join(tmpdir, 'up.tar')
+    tarball = join(tmpdir, "up.tar")
     dt.write_reference(dt.mirror_dir)
-    create_test_repo('win-32')
+    create_test_repo("win-32")
     dt.tar_repo(dt.mirror_dir, tarball)
     assert isfile(tarball)
 
 
 def run_with_args(args):
     old_args = list(sys.argv)
-    sys.argv = ['conda-diff-tar'] + args
+    sys.argv = ["conda-diff-tar"] + args
     dt.main()
     sys.argv = old_args
old_args def test_version(): - run_with_args(['--version']) + run_with_args(["--version"]) def test_misc(tmpdir): create_test_repo() - run_with_args(['--reference', dt.mirror_dir]) + run_with_args(["--reference", dt.mirror_dir]) assert isfile(dt.REFERENCE_PATH) - create_test_repo('win-32') - run_with_args(['--show', dt.mirror_dir]) - run_with_args(['--create', '--verbose', dt.mirror_dir]) - run_with_args(['--verify', dt.mirror_dir]) + create_test_repo("win-32") + run_with_args(["--show", dt.mirror_dir]) + run_with_args(["--create", "--verbose", dt.mirror_dir]) + run_with_args(["--verify", dt.mirror_dir]) run_with_args([dt.mirror_dir]) # do nothing diff --git a/versioneer.py b/versioneer.py index f250cde..a5d4360 100644 --- a/versioneer.py +++ b/versioneer.py @@ -1,4 +1,3 @@ - # Version: 0.17 """The Versioneer - like a rocketeer, but for versions. @@ -277,6 +276,7 @@ """ from __future__ import print_function + try: import configparser except ImportError: @@ -308,11 +308,13 @@ def get_root(): setup_py = os.path.join(root, "setup.py") versioneer_py = os.path.join(root, "versioneer.py") if not (os.path.exists(setup_py) or os.path.exists(versioneer_py)): - err = ("Versioneer was unable to run the project root directory. " - "Versioneer requires setup.py to be executed from " - "its immediate directory (like 'python setup.py COMMAND'), " - "or in a way that lets it use sys.argv[0] to find the root " - "(like 'python path/to/setup.py COMMAND').") + err = ( + "Versioneer was unable to run the project root directory. " + "Versioneer requires setup.py to be executed from " + "its immediate directory (like 'python setup.py COMMAND'), " + "or in a way that lets it use sys.argv[0] to find the root " + "(like 'python path/to/setup.py COMMAND')." + ) raise VersioneerBadRootError(err) try: # Certain runtime workflows (setup.py install/develop in a setuptools @@ -325,8 +327,10 @@ def get_root(): me_dir = os.path.normcase(os.path.splitext(me)[0]) vsr_dir = os.path.normcase(os.path.splitext(versioneer_py)[0]) if me_dir != vsr_dir: - print("Warning: build in %s is using versioneer.py from %s" - % (os.path.dirname(me), versioneer_py)) + print( + "Warning: build in %s is using versioneer.py from %s" + % (os.path.dirname(me), versioneer_py) + ) except NameError: pass return root @@ -348,6 +352,7 @@ def get(parser, name): if parser.has_option("versioneer", name): return parser.get("versioneer", name) return None + cfg = VersioneerConfig() cfg.VCS = VCS cfg.style = get(parser, "style") or "" @@ -364,6 +369,7 @@ def get(parser, name): class NotThisMethod(Exception): """Exception raised if a method is not valid for the current scenario.""" + # these dictionaries contain VCS-specific tools LONG_VERSION_PY = {} HANDLERS = {} @@ -371,17 +377,18 @@ class NotThisMethod(Exception): def register_vcs_handler(vcs, method): # decorator """Decorator to mark a method as the handler for a particular VCS.""" + def decorate(f): """Store f in HANDLERS[vcs][method].""" if vcs not in HANDLERS: HANDLERS[vcs] = {} HANDLERS[vcs][method] = f return f + return decorate -def run_command(commands, args, cwd=None, verbose=False, hide_stderr=False, - env=None): +def run_command(commands, args, cwd=None, verbose=False, hide_stderr=False, env=None): """Call the given command(s).""" assert isinstance(commands, list) p = None @@ -389,10 +396,13 @@ def run_command(commands, args, cwd=None, verbose=False, hide_stderr=False, try: dispcmd = str([c] + args) # remember shell=False, so use git.cmd on windows, not just git - p = subprocess.Popen([c] + 
args, cwd=cwd, env=env, - stdout=subprocess.PIPE, - stderr=(subprocess.PIPE if hide_stderr - else None)) + p = subprocess.Popen( + [c] + args, + cwd=cwd, + env=env, + stdout=subprocess.PIPE, + stderr=(subprocess.PIPE if hide_stderr else None), + ) break except EnvironmentError: e = sys.exc_info()[1] @@ -415,7 +425,11 @@ def run_command(commands, args, cwd=None, verbose=False, hide_stderr=False, print("stdout was %s" % stdout) return None, p.returncode return stdout, p.returncode -LONG_VERSION_PY['git'] = ''' + + +LONG_VERSION_PY[ + "git" +] = ''' # This file helps to compute a version number in source trees obtained from # git-archive tarball (such as those provided by githubs download-from-tag # feature). Distribution tarballs (built by setup.py sdist) and build @@ -990,7 +1004,7 @@ def git_versions_from_keywords(keywords, tag_prefix, verbose): # starting in git-1.8.3, tags are listed as "tag: foo-1.0" instead of # just "foo-1.0". If we see a "tag: " prefix, prefer those. TAG = "tag: " - tags = set([r[len(TAG):] for r in refs if r.startswith(TAG)]) + tags = set([r[len(TAG) :] for r in refs if r.startswith(TAG)]) if not tags: # Either we're using git < 1.8.3, or there really are no tags. We use # a heuristic: assume all version tags have a digit. The old git %d @@ -999,7 +1013,7 @@ def git_versions_from_keywords(keywords, tag_prefix, verbose): # between branches and tags. By ignoring refnames without digits, we # filter out many common branch names like "release" and # "stabilization", as well as "HEAD" and "master". - tags = set([r for r in refs if re.search(r'\d', r)]) + tags = set([r for r in refs if re.search(r"\d", r)]) if verbose: print("discarding '%s', no digits" % ",".join(refs - tags)) if verbose: @@ -1007,19 +1021,26 @@ def git_versions_from_keywords(keywords, tag_prefix, verbose): for ref in sorted(tags): # sorting will prefer e.g. 
"2.0" over "2.0rc1" if ref.startswith(tag_prefix): - r = ref[len(tag_prefix):] + r = ref[len(tag_prefix) :] if verbose: print("picking %s" % r) - return {"version": r, - "full-revisionid": keywords["full"].strip(), - "dirty": False, "error": None, - "date": date} + return { + "version": r, + "full-revisionid": keywords["full"].strip(), + "dirty": False, + "error": None, + "date": date, + } # no suitable tags, so version is "0+unknown", but full hex is still there if verbose: print("no suitable tags, using unknown + full revision id") - return {"version": "0+unknown", - "full-revisionid": keywords["full"].strip(), - "dirty": False, "error": "no suitable tags", "date": None} + return { + "version": "0+unknown", + "full-revisionid": keywords["full"].strip(), + "dirty": False, + "error": "no suitable tags", + "date": None, + } @register_vcs_handler("git", "pieces_from_vcs") @@ -1034,8 +1055,7 @@ def git_pieces_from_vcs(tag_prefix, root, verbose, run_command=run_command): if sys.platform == "win32": GITS = ["git.cmd", "git.exe"] - out, rc = run_command(GITS, ["rev-parse", "--git-dir"], cwd=root, - hide_stderr=True) + out, rc = run_command(GITS, ["rev-parse", "--git-dir"], cwd=root, hide_stderr=True) if rc != 0: if verbose: print("Directory %s not under git control" % root) @@ -1043,10 +1063,19 @@ def git_pieces_from_vcs(tag_prefix, root, verbose, run_command=run_command): # if there is a tag matching tag_prefix, this yields TAG-NUM-gHEX[-dirty] # if there isn't one, this yields HEX[-dirty] (no NUM) - describe_out, rc = run_command(GITS, ["describe", "--tags", "--dirty", - "--always", "--long", - "--match", "%s*" % tag_prefix], - cwd=root) + describe_out, rc = run_command( + GITS, + [ + "describe", + "--tags", + "--dirty", + "--always", + "--long", + "--match", + "%s*" % tag_prefix, + ], + cwd=root, + ) # --long was added in git-1.5.5 if describe_out is None: raise NotThisMethod("'git describe' failed") @@ -1069,17 +1098,16 @@ def git_pieces_from_vcs(tag_prefix, root, verbose, run_command=run_command): dirty = git_describe.endswith("-dirty") pieces["dirty"] = dirty if dirty: - git_describe = git_describe[:git_describe.rindex("-dirty")] + git_describe = git_describe[: git_describe.rindex("-dirty")] # now we have TAG-NUM-gHEX or HEX if "-" in git_describe: # TAG-NUM-gHEX - mo = re.search(r'^(.+)-(\d+)-g([0-9a-f]+)$', git_describe) + mo = re.search(r"^(.+)-(\d+)-g([0-9a-f]+)$", git_describe) if not mo: # unparseable. Maybe git-describe is misbehaving? 
- pieces["error"] = ("unable to parse git-describe output: '%s'" - % describe_out) + pieces["error"] = "unable to parse git-describe output: '%s'" % describe_out return pieces # tag @@ -1088,10 +1116,12 @@ def git_pieces_from_vcs(tag_prefix, root, verbose, run_command=run_command): if verbose: fmt = "tag '%s' doesn't start with prefix '%s'" print(fmt % (full_tag, tag_prefix)) - pieces["error"] = ("tag '%s' doesn't start with prefix '%s'" - % (full_tag, tag_prefix)) + pieces["error"] = "tag '%s' doesn't start with prefix '%s'" % ( + full_tag, + tag_prefix, + ) return pieces - pieces["closest-tag"] = full_tag[len(tag_prefix):] + pieces["closest-tag"] = full_tag[len(tag_prefix) :] # distance: number of commits since tag pieces["distance"] = int(mo.group(2)) @@ -1102,13 +1132,13 @@ def git_pieces_from_vcs(tag_prefix, root, verbose, run_command=run_command): else: # HEX: no tags pieces["closest-tag"] = None - count_out, rc = run_command(GITS, ["rev-list", "HEAD", "--count"], - cwd=root) + count_out, rc = run_command(GITS, ["rev-list", "HEAD", "--count"], cwd=root) pieces["distance"] = int(count_out) # total number of commits # commit date: see ISO-8601 comment in git_versions_from_keywords() - date = run_command(GITS, ["show", "-s", "--format=%ci", "HEAD"], - cwd=root)[0].strip() + date = run_command(GITS, ["show", "-s", "--format=%ci", "HEAD"], cwd=root)[ + 0 + ].strip() pieces["date"] = date.strip().replace(" ", "T", 1).replace(" ", "", 1) return pieces @@ -1164,18 +1194,25 @@ def versions_from_parentdir(parentdir_prefix, root, verbose): for i in range(3): dirname = os.path.basename(root) if dirname.startswith(parentdir_prefix): - return {"version": dirname[len(parentdir_prefix):], - "full-revisionid": None, - "dirty": False, "error": None, "date": None} + return { + "version": dirname[len(parentdir_prefix) :], + "full-revisionid": None, + "dirty": False, + "error": None, + "date": None, + } else: rootdirs.append(root) root = os.path.dirname(root) # up a level if verbose: - print("Tried directories %s but none started with prefix %s" % - (str(rootdirs), parentdir_prefix)) + print( + "Tried directories %s but none started with prefix %s" + % (str(rootdirs), parentdir_prefix) + ) raise NotThisMethod("rootdir doesn't start with parentdir_prefix") + SHORT_VERSION_PY = """ # This file was generated by 'versioneer.py' (0.17) from # revision-control system data, or from the parent directory name of an @@ -1201,11 +1238,13 @@ def versions_from_file(filename): contents = f.read() except EnvironmentError: raise NotThisMethod("unable to read _version.py") - mo = re.search(r"version_json = '''\n(.*)''' # END VERSION_JSON", - contents, re.M | re.S) + mo = re.search( + r"version_json = '''\n(.*)''' # END VERSION_JSON", contents, re.M | re.S + ) if not mo: - mo = re.search(r"version_json = '''\r\n(.*)''' # END VERSION_JSON", - contents, re.M | re.S) + mo = re.search( + r"version_json = '''\r\n(.*)''' # END VERSION_JSON", contents, re.M | re.S + ) if not mo: raise NotThisMethod("no version_json in _version.py") return json.loads(mo.group(1)) @@ -1214,8 +1253,7 @@ def versions_from_file(filename): def write_to_version_file(filename, versions): """Write the given version number to the given _version.py file.""" os.unlink(filename) - contents = json.dumps(versions, sort_keys=True, - indent=1, separators=(",", ": ")) + contents = json.dumps(versions, sort_keys=True, indent=1, separators=(",", ": ")) with open(filename, "w") as f: f.write(SHORT_VERSION_PY % contents) @@ -1247,8 +1285,7 @@ def 
render_pep440(pieces): rendered += ".dirty" else: # exception #1 - rendered = "0+untagged.%d.g%s" % (pieces["distance"], - pieces["short"]) + rendered = "0+untagged.%d.g%s" % (pieces["distance"], pieces["short"]) if pieces["dirty"]: rendered += ".dirty" return rendered @@ -1362,11 +1399,13 @@ def render_git_describe_long(pieces): def render(pieces, style): """Render the given version pieces into the requested style.""" if pieces["error"]: - return {"version": "unknown", - "full-revisionid": pieces.get("long"), - "dirty": None, - "error": pieces["error"], - "date": None} + return { + "version": "unknown", + "full-revisionid": pieces.get("long"), + "dirty": None, + "error": pieces["error"], + "date": None, + } if not style or style == "default": style = "pep440" # the default @@ -1386,9 +1425,13 @@ def render(pieces, style): else: raise ValueError("unknown style '%s'" % style) - return {"version": rendered, "full-revisionid": pieces["long"], - "dirty": pieces["dirty"], "error": None, - "date": pieces.get("date")} + return { + "version": rendered, + "full-revisionid": pieces["long"], + "dirty": pieces["dirty"], + "error": None, + "date": pieces.get("date"), + } class VersioneerBadRootError(Exception): @@ -1411,8 +1454,9 @@ def get_versions(verbose=False): handlers = HANDLERS.get(cfg.VCS) assert handlers, "unrecognized VCS '%s'" % cfg.VCS verbose = verbose or cfg.verbose - assert cfg.versionfile_source is not None, \ - "please set versioneer.versionfile_source" + assert ( + cfg.versionfile_source is not None + ), "please set versioneer.versionfile_source" assert cfg.tag_prefix is not None, "please set versioneer.tag_prefix" versionfile_abs = os.path.join(root, cfg.versionfile_source) @@ -1466,9 +1510,13 @@ def get_versions(verbose=False): if verbose: print("unable to compute version") - return {"version": "0+unknown", "full-revisionid": None, - "dirty": None, "error": "unable to compute version", - "date": None} + return { + "version": "0+unknown", + "full-revisionid": None, + "dirty": None, + "error": "unable to compute version", + "date": None, + } def get_version(): @@ -1517,6 +1565,7 @@ def run(self): print(" date: %s" % vers.get("date")) if vers["error"]: print(" error: %s" % vers["error"]) + cmds["version"] = cmd_version # we override "build_py" in both distutils and setuptools @@ -1549,14 +1598,15 @@ def run(self): # now locate _version.py in the new build/ directory and replace # it with an updated value if cfg.versionfile_build: - target_versionfile = os.path.join(self.build_lib, - cfg.versionfile_build) + target_versionfile = os.path.join(self.build_lib, cfg.versionfile_build) print("UPDATING %s" % target_versionfile) write_to_version_file(target_versionfile, versions) + cmds["build_py"] = cmd_build_py if "cx_Freeze" in sys.modules: # cx_freeze enabled? from cx_Freeze.dist import build_exe as _build_exe + # nczeczulin reports that py2exe won't like the pep440-style string # as FILEVERSION, but it can be used for PRODUCTVERSION, e.g. 
# setup(console=[{ @@ -1577,17 +1627,21 @@ def run(self): os.unlink(target_versionfile) with open(cfg.versionfile_source, "w") as f: LONG = LONG_VERSION_PY[cfg.VCS] - f.write(LONG % - {"DOLLAR": "$", - "STYLE": cfg.style, - "TAG_PREFIX": cfg.tag_prefix, - "PARENTDIR_PREFIX": cfg.parentdir_prefix, - "VERSIONFILE_SOURCE": cfg.versionfile_source, - }) + f.write( + LONG + % { + "DOLLAR": "$", + "STYLE": cfg.style, + "TAG_PREFIX": cfg.tag_prefix, + "PARENTDIR_PREFIX": cfg.parentdir_prefix, + "VERSIONFILE_SOURCE": cfg.versionfile_source, + } + ) + cmds["build_exe"] = cmd_build_exe del cmds["build_py"] - if 'py2exe' in sys.modules: # py2exe enabled? + if "py2exe" in sys.modules: # py2exe enabled? try: from py2exe.distutils_buildexe import py2exe as _py2exe # py3 except ImportError: @@ -1606,13 +1660,17 @@ def run(self): os.unlink(target_versionfile) with open(cfg.versionfile_source, "w") as f: LONG = LONG_VERSION_PY[cfg.VCS] - f.write(LONG % - {"DOLLAR": "$", - "STYLE": cfg.style, - "TAG_PREFIX": cfg.tag_prefix, - "PARENTDIR_PREFIX": cfg.parentdir_prefix, - "VERSIONFILE_SOURCE": cfg.versionfile_source, - }) + f.write( + LONG + % { + "DOLLAR": "$", + "STYLE": cfg.style, + "TAG_PREFIX": cfg.tag_prefix, + "PARENTDIR_PREFIX": cfg.parentdir_prefix, + "VERSIONFILE_SOURCE": cfg.versionfile_source, + } + ) + cmds["py2exe"] = cmd_py2exe # we override different "sdist" commands for both environments @@ -1639,8 +1697,10 @@ def make_release_tree(self, base_dir, files): # updated value target_versionfile = os.path.join(base_dir, cfg.versionfile_source) print("UPDATING %s" % target_versionfile) - write_to_version_file(target_versionfile, - self._versioneer_generated_versions) + write_to_version_file( + target_versionfile, self._versioneer_generated_versions + ) + cmds["sdist"] = cmd_sdist return cmds @@ -1695,11 +1755,13 @@ def do_setup(): root = get_root() try: cfg = get_config_from_root(root) - except (EnvironmentError, configparser.NoSectionError, - configparser.NoOptionError) as e: + except ( + EnvironmentError, + configparser.NoSectionError, + configparser.NoOptionError, + ) as e: if isinstance(e, (EnvironmentError, configparser.NoSectionError)): - print("Adding sample versioneer config to setup.cfg", - file=sys.stderr) + print("Adding sample versioneer config to setup.cfg", file=sys.stderr) with open(os.path.join(root, "setup.cfg"), "a") as f: f.write(SAMPLE_CONFIG) print(CONFIG_ERROR, file=sys.stderr) @@ -1708,15 +1770,18 @@ def do_setup(): print(" creating %s" % cfg.versionfile_source) with open(cfg.versionfile_source, "w") as f: LONG = LONG_VERSION_PY[cfg.VCS] - f.write(LONG % {"DOLLAR": "$", - "STYLE": cfg.style, - "TAG_PREFIX": cfg.tag_prefix, - "PARENTDIR_PREFIX": cfg.parentdir_prefix, - "VERSIONFILE_SOURCE": cfg.versionfile_source, - }) - - ipy = os.path.join(os.path.dirname(cfg.versionfile_source), - "__init__.py") + f.write( + LONG + % { + "DOLLAR": "$", + "STYLE": cfg.style, + "TAG_PREFIX": cfg.tag_prefix, + "PARENTDIR_PREFIX": cfg.parentdir_prefix, + "VERSIONFILE_SOURCE": cfg.versionfile_source, + } + ) + + ipy = os.path.join(os.path.dirname(cfg.versionfile_source), "__init__.py") if os.path.exists(ipy): try: with open(ipy, "r") as f: @@ -1758,8 +1823,10 @@ def do_setup(): else: print(" 'versioneer.py' already in MANIFEST.in") if cfg.versionfile_source not in simple_includes: - print(" appending versionfile_source ('%s') to MANIFEST.in" % - cfg.versionfile_source) + print( + " appending versionfile_source ('%s') to MANIFEST.in" + % cfg.versionfile_source + ) with open(manifest_in, "a") as f: 
f.write("include %s\n" % cfg.versionfile_source) else: @@ -1808,6 +1875,7 @@ def scan_setup_py(): errors += 1 return errors + if __name__ == "__main__": cmd = sys.argv[1] if cmd == "setup":