From c6d2b90c85316a11d32a689c9ec751a87d44b4ae Mon Sep 17 00:00:00 2001 From: JackAtGaia <86691241+JackAtGaia@users.noreply.github.com> Date: Wed, 23 Feb 2022 15:44:11 -0800 Subject: [PATCH] Gaiaplat 1838 : Applying Black formatter to Docker_Dev (#1367) * GAIAPLAT-1838 - Applying Black formatter to docker_dev project. https://gaiaplatform.atlassian.net/browse/GAIAPLAT-1838 --- .pre-commit-config.yaml | 4 +- dev_tools/docker_dev/gdev/__main__.py | 4 +- dev_tools/docker_dev/gdev/cmd/build.py | 3 +- dev_tools/docker_dev/gdev/cmd/cfg.py | 10 +- dev_tools/docker_dev/gdev/cmd/dockerfile.py | 3 +- .../docker_dev/gdev/cmd/gen/_abc/build.py | 83 +-- dev_tools/docker_dev/gdev/cmd/gen/_abc/cfg.py | 79 ++- .../gdev/cmd/gen/_abc/dockerfile.py | 112 +-- .../docker_dev/gdev/cmd/gen/_abc/push.py | 5 +- dev_tools/docker_dev/gdev/cmd/gen/_abc/run.py | 83 +-- .../docker_dev/gdev/cmd/gen/_custom/build.py | 10 +- .../docker_dev/gdev/cmd/gen/_custom/cfg.py | 15 +- .../gdev/cmd/gen/_custom/dockerfile.py | 28 +- .../docker_dev/gdev/cmd/gen/_custom/run.py | 21 +- .../docker_dev/gdev/cmd/gen/apt/build.py | 5 +- dev_tools/docker_dev/gdev/cmd/gen/apt/cfg.py | 3 +- .../docker_dev/gdev/cmd/gen/apt/dockerfile.py | 21 +- dev_tools/docker_dev/gdev/cmd/gen/apt/push.py | 5 +- dev_tools/docker_dev/gdev/cmd/gen/apt/run.py | 4 +- .../docker_dev/gdev/cmd/gen/env/build.py | 5 +- dev_tools/docker_dev/gdev/cmd/gen/env/cfg.py | 3 +- .../docker_dev/gdev/cmd/gen/env/dockerfile.py | 9 +- dev_tools/docker_dev/gdev/cmd/gen/env/push.py | 5 +- dev_tools/docker_dev/gdev/cmd/gen/env/run.py | 5 +- .../docker_dev/gdev/cmd/gen/gaia/build.py | 5 +- dev_tools/docker_dev/gdev/cmd/gen/gaia/cfg.py | 3 +- .../gdev/cmd/gen/gaia/dockerfile.py | 9 +- .../docker_dev/gdev/cmd/gen/gaia/push.py | 5 +- dev_tools/docker_dev/gdev/cmd/gen/gaia/run.py | 5 +- .../docker_dev/gdev/cmd/gen/git/build.py | 5 +- dev_tools/docker_dev/gdev/cmd/gen/git/cfg.py | 3 +- .../docker_dev/gdev/cmd/gen/git/dockerfile.py | 25 +- dev_tools/docker_dev/gdev/cmd/gen/git/push.py | 5 +- .../docker_dev/gdev/cmd/gen/pip/build.py | 5 +- dev_tools/docker_dev/gdev/cmd/gen/pip/cfg.py | 3 +- .../docker_dev/gdev/cmd/gen/pip/dockerfile.py | 16 +- dev_tools/docker_dev/gdev/cmd/gen/pip/push.py | 5 +- dev_tools/docker_dev/gdev/cmd/gen/pip/run.py | 5 +- .../docker_dev/gdev/cmd/gen/pre_run/build.py | 5 +- .../docker_dev/gdev/cmd/gen/pre_run/cfg.py | 3 +- .../gdev/cmd/gen/pre_run/dockerfile.py | 47 +- .../docker_dev/gdev/cmd/gen/pre_run/push.py | 5 +- .../docker_dev/gdev/cmd/gen/pre_run/run.py | 5 +- .../docker_dev/gdev/cmd/gen/run/build.py | 5 +- dev_tools/docker_dev/gdev/cmd/gen/run/cfg.py | 3 +- .../docker_dev/gdev/cmd/gen/run/dockerfile.py | 19 +- dev_tools/docker_dev/gdev/cmd/gen/run/push.py | 5 +- dev_tools/docker_dev/gdev/cmd/gen/run/run.py | 5 +- .../docker_dev/gdev/cmd/gen/web/build.py | 5 +- dev_tools/docker_dev/gdev/cmd/gen/web/cfg.py | 3 +- .../docker_dev/gdev/cmd/gen/web/dockerfile.py | 18 +- dev_tools/docker_dev/gdev/cmd/gen/web/push.py | 5 +- dev_tools/docker_dev/gdev/cmd/gen/web/run.py | 5 +- dev_tools/docker_dev/gdev/cmd/push.py | 3 +- dev_tools/docker_dev/gdev/cmd/run.py | 2 +- dev_tools/docker_dev/gdev/custom/gaia_path.py | 23 +- dev_tools/docker_dev/gdev/dependency.py | 178 ++--- dev_tools/docker_dev/gdev/host.py | 49 +- dev_tools/docker_dev/gdev/main.py | 8 +- dev_tools/docker_dev/gdev/mount.py | 2 + dev_tools/docker_dev/gdev/options.py | 5 + dev_tools/docker_dev/gdev/parser_structure.py | 40 +- .../gdev/third_party/argcomplete.py | 6 +- .../docker_dev/gdev/third_party/atools.py | 
4 +- dev_tools/docker_dev/test/gdev_execute.py | 73 +- dev_tools/docker_dev/test/pytest_execute.py | 25 +- .../docker_dev/test/test_scenario_help.py | 227 +++++++ dev_tools/docker_dev/test/test_scenarios.py | 640 +++++++++--------- 68 files changed, 1235 insertions(+), 812 deletions(-) create mode 100644 dev_tools/docker_dev/test/test_scenario_help.py diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 3e81f199381f..df799edf5e95 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -171,13 +171,13 @@ repos: rev: 21.7b0 hooks: - id: black - exclude: ^(dev_tools/gdev/|dev_tools/docker_dev/|production/tools/tests/gaiat/lit.cfg.py) + exclude: ^(dev_tools/gdev/|production/tools/tests/gaiat/lit.cfg.py) - repo: https://github.com/PyCQA/flake8 rev: 4.0.1 hooks: - id: flake8 - exclude: ^(dev_tools/gdev/|dev_tools/docker_dev/|production/tools/tests/gaiat/lit.cfg.py) + exclude: ^(dev_tools/gdev/|production/tools/tests/gaiat/lit.cfg.py) args: - --ignore=E203,E501,W503 diff --git a/dev_tools/docker_dev/gdev/__main__.py b/dev_tools/docker_dev/gdev/__main__.py index e53e7aa275ae..914ee1d5d6bf 100644 --- a/dev_tools/docker_dev/gdev/__main__.py +++ b/dev_tools/docker_dev/gdev/__main__.py @@ -14,6 +14,7 @@ import sys from gdev.main import DockerDev + def main() -> int: """ Module main entry point into the application. @@ -21,5 +22,6 @@ def main() -> int: DockerDev().main() return 0 -if __name__ == '__main__': + +if __name__ == "__main__": sys.exit(main()) diff --git a/dev_tools/docker_dev/gdev/cmd/build.py b/dev_tools/docker_dev/gdev/cmd/build.py index f9a6c52ea3db..a290ee21699f 100644 --- a/dev_tools/docker_dev/gdev/cmd/build.py +++ b/dev_tools/docker_dev/gdev/cmd/build.py @@ -8,9 +8,10 @@ """ Module to provide for the `cfg` subcommand entry point. """ + from gdev.dependency import Dependency from gdev.third_party.atools import memoize -from .gen.run.build import GenRunBuild +from gdev.cmd.gen.run.build import GenRunBuild class Build(Dependency): diff --git a/dev_tools/docker_dev/gdev/cmd/cfg.py b/dev_tools/docker_dev/gdev/cmd/cfg.py index 5f97f81e9ef1..bb975536b452 100644 --- a/dev_tools/docker_dev/gdev/cmd/cfg.py +++ b/dev_tools/docker_dev/gdev/cmd/cfg.py @@ -8,8 +8,9 @@ """ Module to provide for the `cfg` subcommand entry point. """ + from gdev.third_party.atools import memoize -from .gen.run.cfg import GenRunCfg +from gdev.cmd.gen.run.cfg import GenRunCfg class Cfg(GenRunCfg): @@ -23,10 +24,7 @@ def cli_entrypoint(self) -> None: Execution entrypoint for this module. """ print( - '\n'.join( - self.get_lines( - cfg_enables=self.options.cfg_enables, - path=self.path - ) + "\n".join( + self.get_lines(cfg_enables=self.options.cfg_enables, path=self.path) ) ) diff --git a/dev_tools/docker_dev/gdev/cmd/dockerfile.py b/dev_tools/docker_dev/gdev/cmd/dockerfile.py index 6691f9a8b886..39a6db76b658 100644 --- a/dev_tools/docker_dev/gdev/cmd/dockerfile.py +++ b/dev_tools/docker_dev/gdev/cmd/dockerfile.py @@ -8,9 +8,10 @@ """ Module to provide for the `dockerfile` subcommand entry point. 
""" + from gdev.dependency import Dependency from gdev.third_party.atools import memoize -from .gen.run.dockerfile import GenRunDockerfile +from gdev.cmd.gen.run.dockerfile import GenRunDockerfile class Dockerfile(Dependency): diff --git a/dev_tools/docker_dev/gdev/cmd/gen/_abc/build.py b/dev_tools/docker_dev/gdev/cmd/gen/_abc/build.py index fb4da88f3935..bc7fec1a9f8f 100644 --- a/dev_tools/docker_dev/gdev/cmd/gen/_abc/build.py +++ b/dev_tools/docker_dev/gdev/cmd/gen/_abc/build.py @@ -24,8 +24,8 @@ # We require buildkit support for inline caching of multi-stage dockerfiles. It's also way faster # and the terminal output is relatively sane. -os.environ['DOCKER_BUILDKIT'] = '1' -os.environ['DOCKER_CLI_EXPERIMENTAL'] = 'enabled' +os.environ["DOCKER_BUILDKIT"] = "1" +os.environ["DOCKER_CLI_EXPERIMENTAL"] = "enabled" class GenAbcBuild(Dependency, ABC): @@ -53,12 +53,14 @@ def _get_actual_label_value(self, name: str) -> str: """ Request that docker provide information about the label value for the current image. """ - if (line := Host.execute_and_get_line_sync( - f'docker image inspect' + if ( + line := Host.execute_and_get_line_sync( + f"docker image inspect" f' --format="{{{{.Config.Labels.{name}}}}}"' - f' {self.get_tag()}' - )) == '""': - value = '' + f" {self.get_tag()}" + ) + ) == '""': + value = "" else: value = line.strip('"') @@ -70,7 +72,7 @@ def _get_actual_label_value_by_name(self) -> Mapping[str, str]: Get the hash of an image with the actual label values that are called for by the configuration. """ - return {'GitHash': self._get_actual_label_value(name='GitHash')} + return {"GitHash": self._get_actual_label_value(name="GitHash")} @memoize def get_actual_label_value_by_name(self) -> Mapping[str, str]: @@ -80,13 +82,14 @@ def get_actual_label_value_by_name(self) -> Mapping[str, str]: """ actual_label_value_by_name = self._get_actual_label_value_by_name() - self.log.debug('actual_label_value_by_name = %s', actual_label_value_by_name) + self.log.debug("actual_label_value_by_name = %s", actual_label_value_by_name) return actual_label_value_by_name @memoize def __get_base_build_names(self) -> Iterable[str]: seen_dockerfiles = set() + def inner(dockerfile: GenAbcDockerfile) -> Iterable[str]: build_names = [] if dockerfile not in seen_dockerfiles: @@ -99,7 +102,7 @@ def inner(dockerfile: GenAbcDockerfile) -> Iterable[str]: base_build_names = tuple(inner(self.dockerfile)) - self.log.debug('base_build_names = %s', base_build_names) + self.log.debug("base_build_names = %s", base_build_names) return base_build_names @@ -110,13 +113,13 @@ def get_sha(self) -> str: """ if lines := Host.execute_and_get_lines_sync( - f'docker image ls -q --no-trunc {self.get_tag()}' + f"docker image ls -q --no-trunc {self.get_tag()}" ): sha = next(iter(lines)) else: - sha = '' + sha = "" - self.log.debug('sha = %s', sha) + self.log.debug("sha = %s", sha) return sha @@ -126,9 +129,9 @@ def get_tag(self) -> str: Construct a tag from the name of the dockerfile. """ - tag = f'{self.dockerfile.get_name()}:latest' + tag = f"{self.dockerfile.get_name()}:latest" - self.log.debug('tag = %s', tag) + self.log.debug("tag = %s", tag) return tag @@ -137,9 +140,9 @@ def __get_wanted_git_hash(self) -> str: """ Request that GIT provides information about the SHA of the HEAD node. 
""" - wanted_git_hash = Host.execute_and_get_line_sync('git rev-parse HEAD') + wanted_git_hash = Host.execute_and_get_line_sync("git rev-parse HEAD") - self.log.debug('wanted_git_hash = %s', wanted_git_hash) + self.log.debug("wanted_git_hash = %s", wanted_git_hash) return wanted_git_hash @@ -147,7 +150,7 @@ def _get_wanted_label_value_by_name(self) -> Mapping[str, str]: """ Get the hash of the image with the label values that are called for by the configuration. """ - return {'GitHash': self.__get_wanted_git_hash()} + return {"GitHash": self.__get_wanted_git_hash()} @memoize def get_wanted_label_value_by_name(self) -> Mapping[str, str]: @@ -156,7 +159,7 @@ def get_wanted_label_value_by_name(self) -> Mapping[str, str]: """ wanted_label_value_by_name = self._get_wanted_label_value_by_name() - self.log.debug('wanted_label_value_by_name = %s', wanted_label_value_by_name) + self.log.debug("wanted_label_value_by_name = %s", wanted_label_value_by_name) return wanted_label_value_by_name @@ -169,39 +172,35 @@ def main(self) -> None: cached_images = "" if self.options.registry: - cached_images = ','.join( - [f'{self.options.registry}/{base_build_name}:latest' - for base_build_name in self.__get_base_build_names()]) + cached_images = ",".join( + [ + f"{self.options.registry}/{base_build_name}:latest" + for base_build_name in self.__get_base_build_names() + ] + ) cached_images = f"--cache-from {cached_images}" self.log.info('Creating image "%s"', self.get_tag()) Host.execute_sync( - f'docker buildx build' - f' -f {self.dockerfile.path}' - f' -t {self.get_tag()}' - - f'''{''.join([ + f"docker buildx build" + f" -f {self.dockerfile.path}" + f" -t {self.get_tag()}" + f"""{''.join([ f' --label {name}="{value}"' for name, value in (self.get_wanted_label_value_by_name()).items() - ])}''' - + ])}""" # Keep metadata about layers so that they can be used as a cache source. - f' --build-arg BUILDKIT_INLINE_CACHE=1' - - f' --platform linux/{self.options.platform}' - + f" --build-arg BUILDKIT_INLINE_CACHE=1" + f" --platform linux/{self.options.platform}" # Required to run production. - f' --shm-size 1gb' - + f" --shm-size 1gb" # Allow cloning repos with ssh. - f' --ssh default' - - f' {cached_images}' - - f' {GaiaPath.repo()}' + f" --ssh default" + f" {cached_images}" + f" {GaiaPath.repo()}" ) - Host.execute_sync('docker image prune -f') + Host.execute_sync("docker image prune -f") # pylint: disable=import-outside-toplevel # @@ -216,7 +215,9 @@ def cli_entrypoint(self) -> None: build = self else: from gdev.cmd.gen._custom.build import GenCustomBuild + build = GenCustomBuild(options=self.options, base_build=self) build.run() + # pylint: enable=import-outside-toplevel diff --git a/dev_tools/docker_dev/gdev/cmd/gen/_abc/cfg.py b/dev_tools/docker_dev/gdev/cmd/gen/_abc/cfg.py index 5ea9296771d7..b59f829c88f2 100644 --- a/dev_tools/docker_dev/gdev/cmd/gen/_abc/cfg.py +++ b/dev_tools/docker_dev/gdev/cmd/gen/_abc/cfg.py @@ -8,6 +8,7 @@ """ Module to parse the target gdev.cfg for build rules. """ + from abc import ABC from inspect import getfile import re @@ -17,6 +18,7 @@ from gdev.dependency import Dependency from gdev.third_party.atools import memoize + class GenAbcCfg(Dependency, ABC): """ Class to parse the target gdev.cfg for build rules. @@ -28,8 +30,8 @@ def path(self) -> GaiaPath: """ Determine the path to the configuration file that we are observing. 
""" - path = GaiaPath.repo() / self.options.target / 'gdev.cfg' - self.log.debug('path = %s', path) + path = GaiaPath.repo() / self.options.target / "gdev.cfg" + self.log.debug("path = %s", path) return path @property @@ -37,7 +39,7 @@ def section_name(self) -> str: """ Determine the section name in the configuration based on the type of the class. """ - return GaiaPath(getfile(type(self))).parent.name.strip('_') + return GaiaPath(getfile(type(self))).parent.name.strip("_") @memoize def __get_begin_pattern(self) -> Pattern: @@ -45,8 +47,8 @@ def __get_begin_pattern(self) -> Pattern: Get the regex pattern that identifies the beginning of the section. """ - begin_pattern = re.compile(fr'^\[{self.section_name}]$') - self.log.debug('begin_pattern = %s', begin_pattern) + begin_pattern = re.compile(fr"^\[{self.section_name}]$") + self.log.debug("begin_pattern = %s", begin_pattern) return begin_pattern @memoize @@ -54,11 +56,10 @@ def __get_end_pattern(self) -> Pattern: """ Get the regex pattern that identifies the end of the section. """ - end_pattern = re.compile(r'^(# .*|)\[.+]$') - self.log.debug('end_pattern = %s', end_pattern) + end_pattern = re.compile(r"^(# .*|)\[.+]$") + self.log.debug("end_pattern = %s", end_pattern) return end_pattern - # pylint: disable=eval-used @staticmethod @memoize @@ -71,35 +72,39 @@ def get_lines(cfg_enables: FrozenSet[str], path: GaiaPath) -> Iterable[str]: # - `__locals` field set to an empty dictionary # - `__globals` field set to contain empty `__builtins__` item - return tuple(( - eval( - f'fr""" {line} """', - { - 'build_dir': GaiaPath.build, - 'enable_if': lambda enable: - '' if enable in cfg_enables + return tuple( + ( + eval( + f'fr""" {line} """', + { + "build_dir": GaiaPath.build, + "enable_if": lambda enable: "" + if enable in cfg_enables else f'# enable by setting "{enable}": ', - 'enable_if_not': lambda enable: - '' if enable not in cfg_enables + "enable_if_not": lambda enable: "" + if enable not in cfg_enables else f'# enable by not setting "{enable}": ', - 'enable_if_any': lambda *enables: - '' if set(enables) & cfg_enables + "enable_if_any": lambda *enables: "" + if set(enables) & cfg_enables else f'# enable by setting any of "{sorted(set(enables))}": ', - 'enable_if_not_any': lambda *enables: - '' if not (set(enables) & cfg_enables) + "enable_if_not_any": lambda *enables: "" + if not (set(enables) & cfg_enables) else f'# enable by not setting any of "{sorted(set(enables))}": ', - 'enable_if_all': lambda *enables: - '' if set(enables) in cfg_enables + "enable_if_all": lambda *enables: "" + if set(enables) in cfg_enables else f'# enable by setting all of "{sorted(set(enables))}": ', - 'enable_if_not_all': lambda *enables: - '' if not (set(enables) in cfg_enables) + "enable_if_not_all": lambda *enables: "" + if not (set(enables) in cfg_enables) else f'# enable by not setting all of "{sorted(set(enables))}": ', - 'source_dir': GaiaPath.source, - "__builtins__": {} - }, {} - )[1:-1] - for line in (GenAbcCfg.__get_raw_text(path)).splitlines() - )) + "source_dir": GaiaPath.source, + "__builtins__": {}, + }, + {}, + )[1:-1] + for line in (GenAbcCfg.__get_raw_text(path)).splitlines() + ) + ) + # pylint: enable=eval-used @staticmethod @@ -136,20 +141,20 @@ def get_section_lines(self) -> Iterable[str]: section_lines = [] for iline in ilines: line_parts = [iline] - while line_parts[-1].endswith('\\'): - if not (next_line := next(ilines)).strip().startswith('#'): + while line_parts[-1].endswith("\\"): + if not (next_line := 
next(ilines)).strip().startswith("#"):
                     line_parts.append(next_line)
-            section_lines.append('\n '.join(line_parts))
+            section_lines.append("\n ".join(line_parts))

         section_lines = [
             section_line
             for section_line in section_lines
-            if (section_line and not section_line.startswith('#'))
+            if (section_line and not section_line.startswith("#"))
         ]

         section_lines = tuple(section_lines)

-        self.log.debug('section_lines = %s', section_lines)
+        self.log.debug("section_lines = %s", section_lines)

         return section_lines

@@ -158,4 +163,4 @@ def cli_entrypoint(self) -> None:
         """
         Execution entrypoint for this module.
         """
-        print(f'[{self.section_name}]\n' + '\n'.join(self.get_section_lines()))
+        print(f"[{self.section_name}]\n" + "\n".join(self.get_section_lines()))
diff --git a/dev_tools/docker_dev/gdev/cmd/gen/_abc/dockerfile.py b/dev_tools/docker_dev/gdev/cmd/gen/_abc/dockerfile.py
index 5e4a5dae3756..3c65907b8a39 100644
--- a/dev_tools/docker_dev/gdev/cmd/gen/_abc/dockerfile.py
+++ b/dev_tools/docker_dev/gdev/cmd/gen/_abc/dockerfile.py
@@ -41,13 +41,13 @@ def path(self) -> GaiaPath:
         Return path where dockerfile is to be written.
         """
         path = (
-                GaiaPath.repo()
-                / '.gdev'
-                / self.options.target
-                / f'{self.cfg.section_name}.dockerfile.gdev'
+            GaiaPath.repo()
+            / ".gdev"
+            / self.options.target
+            / f"{self.cfg.section_name}.dockerfile.gdev"
         )

-        self.log.debug('path = %s', path)
+        self.log.debug("path = %s", path)

         return path

@@ -57,7 +57,8 @@ def get_base_stages_text(self) -> str:
         Get the text that applies to the base stages of the dockerfile.
         """
-        base_stages_text = dedent(fr'''
+        base_stages_text = dedent(
+            fr"""
             #syntax=docker/dockerfile-upstream:master-experimental

             # Static definition of base stages.
@@ -98,9 +99,10 @@ def get_base_stages_text(self) -> str:
             RUN apt-get update \
                 && DEBIAN_FRONTEND=noninteractive apt-get install -y wget \
                 && apt-get clean
-        ''').strip()
+            """
+        ).strip()

-        self.log.debug('base_stages_text = %s', base_stages_text)
+        self.log.debug("base_stages_text = %s", base_stages_text)

         return base_stages_text

@@ -113,6 +115,7 @@ def get_copy_section(self) -> str:
         Return text for the COPY section of the final build stage.
         """
         from gdev.cmd.gen.pre_run.dockerfile import GenPreRunDockerfile
+
         seen_dockerfiles = set()

         # Calculating which build stages to copy from has a tricky problem. Docker
@@ -127,25 +130,27 @@ def inner(dockerfile: GenAbcDockerfile) -> Iterable[str]:
             for input_dockerfile in dockerfile.get_input_dockerfiles():
                 if input_dockerfile.get_run_section():
                     copy_section_parts.append(
-                        f'COPY --from={input_dockerfile.get_name()} / /'
+                        f"COPY --from={input_dockerfile.get_name()} / /"
                     )
                 else:
                     copy_section_parts += inner(input_dockerfile)

             path = dockerfile.cfg.path.parent
-            if (
-                    isinstance(dockerfile, GenPreRunDockerfile)
-                    and set(path.iterdir()) - {dockerfile.cfg.path}
-            ):
-                copy_section_parts.append(f'COPY {path.context()} {path.image_source()}')
+            if isinstance(dockerfile, GenPreRunDockerfile) and set(
+                path.iterdir()
+            ) - {dockerfile.cfg.path}:
+                copy_section_parts.append(
+                    f"COPY {path.context()} {path.image_source()}"
+                )

             return copy_section_parts

-        copy_section = '\n'.join(inner(self))
+        copy_section = "\n".join(inner(self))

-        self.log.debug('copy_section = %s', copy_section)
+        self.log.debug("copy_section = %s", copy_section)

         return copy_section
+
     # pylint: enable=import-outside-toplevel

     @memoize
     def get_env_section(self) -> str:
         """
         Return text for the ENV section of the final build stage.
""" - env_section = '' - self.log.debug('env_section = %s', env_section) + env_section = "" + self.log.debug("env_section = %s", env_section) return env_section @memoize @@ -162,17 +167,21 @@ def get_final_stage_text(self) -> str: """ Return the text for the final stage, built up of the individual sections, in order. """ - final_stage_text = '\n'.join(line for line in [ - f'\n# {self}', - self.get_from_section(), - self.get_copy_section(), - self.get_env_section(), - self.__get_workdir_section(), - self.get_run_section(), - 'ENTRYPOINT [ "/bin/bash" ]', - ] if line) - - self.log.debug('final_stage_text = %s', final_stage_text) + final_stage_text = "\n".join( + line + for line in [ + f"\n# {self}", + self.get_from_section(), + self.get_copy_section(), + self.get_env_section(), + self.__get_workdir_section(), + self.get_run_section(), + 'ENTRYPOINT [ "/bin/bash" ]', + ] + if line + ) + + self.log.debug("final_stage_text = %s", final_stage_text) return final_stage_text @@ -181,9 +190,9 @@ def get_from_section(self) -> str: """ Return text for the FROM line of the final build stage. """ - from_section = f'FROM base AS {self.get_name()}' + from_section = f"FROM base AS {self.get_name()}" - self.log.debug('from_section = %s', from_section) + self.log.debug("from_section = %s", from_section) return from_section @@ -194,7 +203,7 @@ def get_input_dockerfiles(self) -> Iterable[GenAbcDockerfile]: """ input_dockerfiles = tuple() - self.log.debug('input_dockerfiles = %s', input_dockerfiles) + self.log.debug("input_dockerfiles = %s", input_dockerfiles) return input_dockerfiles @@ -203,9 +212,11 @@ def get_name(self) -> str: """ Return the name of the final build stage, for e.g. `FROM AS `. """ - name = f'{self.options.target.replace("/", "__")}__{self.cfg.section_name}'.lower() + name = ( + f'{self.options.target.replace("/", "__")}__{self.cfg.section_name}'.lower() + ) - self.log.debug('name = %s', name) + self.log.debug("name = %s", name) return name @@ -214,21 +225,22 @@ def get_run_section(self) -> str: """ Return text for the RUN line of the final build stage. """ - run_section = '' + run_section = "" - self.log.debug('run_section = %s', run_section) + self.log.debug("run_section = %s", run_section) return run_section - - def __get_dockerfile_sections_recursive(self, dockerfile: GenAbcDockerfile, - seen_dockerfiles) -> Iterable[str]: + def __get_dockerfile_sections_recursive( + self, dockerfile: GenAbcDockerfile, seen_dockerfiles + ) -> Iterable[str]: text_parts = [] if dockerfile not in seen_dockerfiles: seen_dockerfiles.add(dockerfile) for input_dockerfile in dockerfile.get_input_dockerfiles(): - text_parts += self.__get_dockerfile_sections_recursive(input_dockerfile, - seen_dockerfiles) + text_parts += self.__get_dockerfile_sections_recursive( + input_dockerfile, seen_dockerfiles + ) if dockerfile.get_run_section() or dockerfile is self: text_parts.append(dockerfile.get_final_stage_text()) return text_parts @@ -248,10 +260,14 @@ def get_text(self) -> str: # as it will need to copy from any of the missing, empty stage's non-empty input stages # instead. 
- text = '\n'.join([self.get_base_stages_text(), \ - *self.__get_dockerfile_sections_recursive(self, seen_dockerfiles)]) + text = "\n".join( + [ + self.get_base_stages_text(), + *self.__get_dockerfile_sections_recursive(self, seen_dockerfiles), + ] + ) - self.log.debug('text = %s', text) + self.log.debug("text = %s", text) return text @@ -260,9 +276,9 @@ def __get_workdir_section(self) -> str: """ Return text for the WORKDIR line of the final build stage. """ - workdir_section = f'WORKDIR {self.cfg.path.parent.image_build()}' + workdir_section = f"WORKDIR {self.cfg.path.parent.image_build()}" - self.log.debug('workdir_section = %s', workdir_section) + self.log.debug("workdir_section = %s", workdir_section) return workdir_section @@ -271,7 +287,7 @@ def main(self) -> None: """ Mainline interface. """ - self.log.info('Creating dockerfile %s', self.path) + self.log.info("Creating dockerfile %s", self.path) self.path.write_text(data=self.get_text()) # pylint: disable=import-outside-toplevel @@ -289,8 +305,10 @@ def cli_entrypoint(self) -> None: dockerfile = self else: from gdev.cmd.gen._custom.dockerfile import GenCustomDockerfile + dockerfile = GenCustomDockerfile(options=self.options, base_dockerfile=self) dockerfile.run() print(dockerfile.get_text()) + # pylint: enable=import-outside-toplevel diff --git a/dev_tools/docker_dev/gdev/cmd/gen/_abc/push.py b/dev_tools/docker_dev/gdev/cmd/gen/_abc/push.py index 08edf32d2c84..4f9c2446752a 100644 --- a/dev_tools/docker_dev/gdev/cmd/gen/_abc/push.py +++ b/dev_tools/docker_dev/gdev/cmd/gen/_abc/push.py @@ -8,6 +8,7 @@ """ Module to provide for the necessary actions to perform a push of the image. """ + from abc import ABC, abstractmethod from gdev.dependency import Dependency @@ -37,5 +38,5 @@ def main(self) -> None: self.build.main() tag = self.build.get_tag() - Host.execute_sync(f'docker tag {tag} {self.options.registry}/{tag}') - Host.execute_sync(f'docker push {self.options.registry}/{tag}') + Host.execute_sync(f"docker tag {tag} {self.options.registry}/{tag}") + Host.execute_sync(f"docker push {self.options.registry}/{tag}") diff --git a/dev_tools/docker_dev/gdev/cmd/gen/_abc/run.py b/dev_tools/docker_dev/gdev/cmd/gen/_abc/run.py index de30f460650c..5fb353c17c07 100644 --- a/dev_tools/docker_dev/gdev/cmd/gen/_abc/run.py +++ b/dev_tools/docker_dev/gdev/cmd/gen/_abc/run.py @@ -20,6 +20,7 @@ from gdev.host import Host from gdev.cmd.gen._abc.build import GenAbcBuild + class GenAbcRun(Dependency, ABC): """ Create a Docker container from the image build with `gdev build` and run a command in it. @@ -40,52 +41,50 @@ def _get_flags(self) -> str: """ flags_parts = [ # Remove the container once we exit it. - '--rm', - + "--rm", # Use a minimal init system to allow starting services. - '--init', - + "--init", # Usually the default entrypoint, but override it to be certain. - '--entrypoint /bin/bash', - - f'--hostname {self.build.dockerfile.get_name()}', - f'--platform linux/{self.options.platform}', - '--privileged', - + "--entrypoint /bin/bash", + f"--hostname {self.build.dockerfile.get_name()}", + f"--platform linux/{self.options.platform}", + "--privileged", # Mount our current repo as /source/. Modifications to source in the container # are reflected on host. - f' --volume {GaiaPath.repo()}:{GaiaPath.repo().image_source()}' + f" --volume {GaiaPath.repo()}:{GaiaPath.repo().image_source()}", ] # Handle non-TTY environments as well, e.g. TeamCity continuous integration. 
if sys.stdout.isatty(): - flags_parts.append('-it') + flags_parts.append("-it") # Ports to expose between container and host. ports = set(self.options.ports) - if {'clion', 'sshd', 'vscode'} & self.options.mixins: - ports.add('22') - if (authorized_keys_path := GaiaPath.home() / '.ssh' / 'authorized_keys').is_file(): - if {'clion', 'sudo', 'vscode'} & self.options.mixins: + if {"clion", "sshd", "vscode"} & self.options.mixins: + ports.add("22") + if ( + authorized_keys_path := GaiaPath.home() / ".ssh" / "authorized_keys" + ).is_file(): + if {"clion", "sudo", "vscode"} & self.options.mixins: flags_parts.append( - f'-v {authorized_keys_path.absolute()}:{authorized_keys_path.absolute()}' + f"-v {authorized_keys_path.absolute()}:{authorized_keys_path.absolute()}" ) else: flags_parts.append( - f'-v {authorized_keys_path.absolute()}:/root/.ssh/authorized_keys' + f"-v {authorized_keys_path.absolute()}:/root/.ssh/authorized_keys" ) if ports: - flags_parts.append('-p ' + ' '.join(f'{port}:{port}' for port in ports)) + flags_parts.append("-p " + " ".join(f"{port}:{port}" for port in ports)) # Additional mounts to bind between container and host. for mount in self.options.mounts: flags_parts.append( - f'--mount type=volume' - f',dst={mount.container_path}' - f',volume-driver=local' - f',volume-opt=type=none' - f',volume-opt=o=bind' - f',volume-opt=device={mount.host_path}' + f"--mount type=volume" + f",dst={mount.container_path}" + f",volume-driver=local" + f",volume-opt=type=none" + f",volume-opt=o=bind" + f",volume-opt=device={mount.host_path}" ) - flags = ' '.join(flags_parts) + flags = " ".join(flags_parts) return flags @@ -96,7 +95,7 @@ def get_flags(self) -> str: """ flags = self._get_flags() - self.log.debug('flags = %s', flags) + self.log.debug("flags = %s", flags) return flags @@ -106,33 +105,37 @@ def main(self) -> None: Main action to invoke for this class. """ if ( - self.options.force - or (not self.build.get_sha()) - or ( - self.build.get_wanted_label_value_by_name() - != self.build.get_actual_label_value_by_name() - ) + self.options.force + or (not self.build.get_sha()) + or ( + self.build.get_wanted_label_value_by_name() + != self.build.get_actual_label_value_by_name() + ) ): self.build.run() if self.options.mounts: for mount in self.options.mounts: if mount.host_path.exists(): - self.log.info('Binding existing host path "%s" into container.', \ - mount.host_path) + self.log.info( + 'Binding existing host path "%s" into container.', + mount.host_path, + ) else: mount.host_path.mkdir(parents=True) # execvpe the `docker run` command. It's drastically simpler than trying to manage it as a # Python subprocess. 
- command_to_execute = (f'docker run {self.get_flags()}' - f' {self.build.get_tag()}' - f'''{fr' -c "{self.options.args}"' if self.options.args else ""}''') + command_to_execute = ( + f"docker run {self.get_flags()}" + f" {self.build.get_tag()}" + f"""{fr' -c "{self.options.args}"' if self.options.args else ""}""" + ) if Host.is_drydock_enabled(): print(f"[execvpe:{command_to_execute}]") else: command = shlex.split(command_to_execute) - self.log.debug('execvpe command=%s', command) + self.log.debug("execvpe command=%s", command) os.execvpe(command[0], command, os.environ) # pylint: disable=import-outside-toplevel @@ -147,7 +150,9 @@ def cli_entrypoint(self) -> None: run = self else: from gdev.cmd.gen._custom.run import GenCustomRun + run = GenCustomRun(options=self.options, base_run=self) run.run() + # pylint: enable=import-outside-toplevel diff --git a/dev_tools/docker_dev/gdev/cmd/gen/_custom/build.py b/dev_tools/docker_dev/gdev/cmd/gen/_custom/build.py index 9774e17f4577..e87eba68cfa2 100644 --- a/dev_tools/docker_dev/gdev/cmd/gen/_custom/build.py +++ b/dev_tools/docker_dev/gdev/cmd/gen/_custom/build.py @@ -8,12 +8,13 @@ """ Module to satisfy the build requirements to generate the dockerfile where MIXINs are used. """ + from dataclasses import dataclass from typing import Mapping from gdev.third_party.atools import memoize -from .dockerfile import GenCustomDockerfile -from .._abc.build import GenAbcBuild +from gdev.cmd.gen._custom.dockerfile import GenCustomDockerfile +from gdev.cmd.gen._abc.build import GenAbcBuild @dataclass(frozen=True, repr=False) @@ -21,6 +22,7 @@ class GenCustomBuild(GenAbcBuild): """ Class to satisfy the build requirements to generate the dockerfile where MIXINs are used. """ + base_build: GenAbcBuild @property @@ -37,7 +39,7 @@ def _get_wanted_label_value_by_name(self) -> Mapping[str, str]: """ return { **super()._get_wanted_label_value_by_name(), - 'Mixins': f'{sorted(self.options.mixins)}'.replace(' ', '') + "Mixins": f"{sorted(self.options.mixins)}".replace(" ", ""), } @memoize @@ -48,5 +50,5 @@ def _get_actual_label_value_by_name(self) -> Mapping[str, str]: """ return { **super()._get_actual_label_value_by_name(), - 'Mixins': super()._get_actual_label_value('Mixins') + "Mixins": super()._get_actual_label_value("Mixins"), } diff --git a/dev_tools/docker_dev/gdev/cmd/gen/_custom/cfg.py b/dev_tools/docker_dev/gdev/cmd/gen/_custom/cfg.py index c53695912c94..408db92c8d5f 100644 --- a/dev_tools/docker_dev/gdev/cmd/gen/_custom/cfg.py +++ b/dev_tools/docker_dev/gdev/cmd/gen/_custom/cfg.py @@ -8,11 +8,12 @@ """ Module to provide a subclass of the GenAbcCfg class for the Apt section where MIXINs are used. """ + from typing import Iterable from gdev.custom.gaia_path import GaiaPath from gdev.third_party.atools import memoize -from .._abc.cfg import GenAbcCfg +from gdev.cmd.gen._abc.cfg import GenAbcCfg class GenCustomCfg(GenAbcCfg): @@ -26,9 +27,13 @@ def get_mixin_lines(self) -> Iterable[str]: """ Get the various lines for the section, all derived from the needed mixins. 
""" - lines = tuple((f'{GaiaPath.mixin().context() / mixin}' \ - for mixin in sorted(self.options.mixins))) - - self.log.info('lines = %s', lines) + lines = tuple( + ( + f"{GaiaPath.mixin().context() / mixin}" + for mixin in sorted(self.options.mixins) + ) + ) + + self.log.info("lines = %s", lines) return lines diff --git a/dev_tools/docker_dev/gdev/cmd/gen/_custom/dockerfile.py b/dev_tools/docker_dev/gdev/cmd/gen/_custom/dockerfile.py index d83f21e27553..a6b778fbd2e9 100644 --- a/dev_tools/docker_dev/gdev/cmd/gen/_custom/dockerfile.py +++ b/dev_tools/docker_dev/gdev/cmd/gen/_custom/dockerfile.py @@ -8,6 +8,7 @@ """ Module to generate the RUN section of the dockerfile where MIXINs are used. """ + from dataclasses import dataclass, replace import os from textwrap import dedent @@ -16,7 +17,7 @@ from gdev.custom.gaia_path import GaiaPath from gdev.third_party.atools import memoize from gdev.cmd.gen._abc.dockerfile import GenAbcDockerfile -from .cfg import GenCustomCfg +from gdev.cmd.gen._custom.cfg import GenCustomCfg @dataclass(frozen=True, repr=False) @@ -25,6 +26,7 @@ class GenCustomDockerfile(GenAbcDockerfile): Class to provide a customized GenAbcDockerfile where user properties from the host of the container are taken into account. """ + base_dockerfile: GenAbcDockerfile @property @@ -46,9 +48,10 @@ def get_env_section(self) -> str: env_section = GenPreRunDockerfile(self.options).get_env_section() - self.log.debug('env_section = %s', env_section) + self.log.debug("env_section = %s", env_section) return env_section + # pylint: enable=import-outside-toplevel # pylint: disable=import-outside-toplevel @@ -63,12 +66,15 @@ def get_input_dockerfiles(self) -> Iterable[GenAbcDockerfile]: input_dockerfiles = [self.base_dockerfile] for line in self.cfg.get_mixin_lines(): - input_dockerfiles.append(GenRunDockerfile(replace(self.options, target=line))) + input_dockerfiles.append( + GenRunDockerfile(replace(self.options, target=line)) + ) input_dockerfiles = tuple(input_dockerfiles) - self.log.debug('input_dockerfiles = %s', input_dockerfiles) + self.log.debug("input_dockerfiles = %s", input_dockerfiles) return input_dockerfiles + # pylint: enable=import-outside-toplevel @memoize @@ -82,12 +88,14 @@ def get_text(self) -> str: """ text_parts = [super().get_text()] - if {'clion', 'sudo', 'vscode'} & self.options.mixins: + if {"clion", "sudo", "vscode"} & self.options.mixins: uid = os.getuid() gid = os.getgid() home = GaiaPath.home() login = os.getlogin() - text_parts.append(dedent(fr''' + text_parts.append( + dedent( + fr""" RUN groupadd -r -o -g {gid} {login} \ && useradd {login} -l -r -o -u {uid} -g {gid} -G sudo \ && mkdir -p {home} \ @@ -95,10 +103,12 @@ def get_text(self) -> str: && echo "{login} ALL=(ALL:ALL) NOPASSWD: ALL" >> /etc/sudoers \ && touch {home}/.sudo_as_admin_successful \ && chown -R {login}:{login} {GaiaPath.repo().image_build()} - ''').strip()) + """ + ).strip() + ) - text = '\n'.join(text_parts) + text = "\n".join(text_parts) - self.log.debug('text = %s', text) + self.log.debug("text = %s", text) return text diff --git a/dev_tools/docker_dev/gdev/cmd/gen/_custom/run.py b/dev_tools/docker_dev/gdev/cmd/gen/_custom/run.py index d5aa21825b24..ac58252c70a9 100644 --- a/dev_tools/docker_dev/gdev/cmd/gen/_custom/run.py +++ b/dev_tools/docker_dev/gdev/cmd/gen/_custom/run.py @@ -8,12 +8,13 @@ """ Module to satisfy the run requirements a RUN section that includes MIXINs. 
""" + from dataclasses import dataclass import os from gdev.third_party.atools import memoize -from .build import GenCustomBuild -from .._abc.run import GenAbcRun +from gdev.cmd.gen._custom.build import GenCustomBuild +from gdev.cmd.gen._abc.run import GenAbcRun @dataclass(frozen=True, repr=False) @@ -21,6 +22,7 @@ class GenCustomRun(GenAbcRun): """ Class to satisfy the run requirements a RUN section that includes MIXINs. """ + base_run: GenAbcRun @property @@ -39,14 +41,11 @@ def _get_flags(self) -> str: flags_parts = [super()._get_flags()] # gdb needs ptrace enabled in order to attach to a process. Additionally, # seccomp=unconfined is recommended for gdb, and we don't run in a hostile environment. - if {'clion', 'gdb', 'vscode'} & self.options.mixins: - flags_parts += [ - '--cap-add=SYS_PTRACE', - '--security-opt seccomp=unconfined' - ] - if {'clion', 'sudo', 'vscode'} & self.options.mixins: - flags_parts.append(f'--user {os.getuid()}:{os.getgid()}') - - flags = ' '.join(flags_parts) + if {"clion", "gdb", "vscode"} & self.options.mixins: + flags_parts += ["--cap-add=SYS_PTRACE", "--security-opt seccomp=unconfined"] + if {"clion", "sudo", "vscode"} & self.options.mixins: + flags_parts.append(f"--user {os.getuid()}:{os.getgid()}") + + flags = " ".join(flags_parts) return flags diff --git a/dev_tools/docker_dev/gdev/cmd/gen/apt/build.py b/dev_tools/docker_dev/gdev/cmd/gen/apt/build.py index 647f87801e94..4c408c030630 100644 --- a/dev_tools/docker_dev/gdev/cmd/gen/apt/build.py +++ b/dev_tools/docker_dev/gdev/cmd/gen/apt/build.py @@ -8,8 +8,9 @@ """ Module to satisfy the build requirements to generate the dockerfile for the APT section. """ -from .dockerfile import GenAptDockerfile -from .._abc.build import GenAbcBuild + +from gdev.cmd.gen.apt.dockerfile import GenAptDockerfile +from gdev.cmd.gen._abc.build import GenAbcBuild class GenAptBuild(GenAbcBuild): diff --git a/dev_tools/docker_dev/gdev/cmd/gen/apt/cfg.py b/dev_tools/docker_dev/gdev/cmd/gen/apt/cfg.py index dd4368430c98..0304f11298e1 100644 --- a/dev_tools/docker_dev/gdev/cmd/gen/apt/cfg.py +++ b/dev_tools/docker_dev/gdev/cmd/gen/apt/cfg.py @@ -8,7 +8,8 @@ """ Module to provide a subclass of the GenAbcCfg class for the Apt section. """ -from .._abc.cfg import GenAbcCfg + +from gdev.cmd.gen._abc.cfg import GenAbcCfg class GenAptCfg(GenAbcCfg): diff --git a/dev_tools/docker_dev/gdev/cmd/gen/apt/dockerfile.py b/dev_tools/docker_dev/gdev/cmd/gen/apt/dockerfile.py index f91f935fb519..f99a35ba0179 100644 --- a/dev_tools/docker_dev/gdev/cmd/gen/apt/dockerfile.py +++ b/dev_tools/docker_dev/gdev/cmd/gen/apt/dockerfile.py @@ -8,9 +8,10 @@ """ Module to generate the APT section of the dockerfile. """ + from gdev.third_party.atools import memoize from gdev.cmd.gen._abc.dockerfile import GenAbcDockerfile -from .cfg import GenAptCfg +from gdev.cmd.gen.apt.cfg import GenAptCfg class GenAptDockerfile(GenAbcDockerfile): @@ -30,9 +31,9 @@ def get_from_section(self) -> str: """ Return text for the FROM line of the final build stage. 
""" - from_section = f'FROM apt_base AS {self.get_name()}' + from_section = f"FROM apt_base AS {self.get_name()}" - self.log.debug('from_section = %s', from_section) + self.log.debug("from_section = %s", from_section) return from_section @@ -43,15 +44,15 @@ def get_run_section(self) -> str: """ if section_lines := self.cfg.get_section_lines(): run_section = ( - 'RUN apt-get update' - + ' \\\n && DEBIAN_FRONTEND=noninteractive apt-get install -y' - + ' \\\n ' - + ' \\\n '.join(section_lines) - + ' \\\n && apt-get clean' + "RUN apt-get update" + + " \\\n && DEBIAN_FRONTEND=noninteractive apt-get install -y" + + " \\\n " + + " \\\n ".join(section_lines) + + " \\\n && apt-get clean" ) else: - run_section = '' + run_section = "" - self.log.debug('run_section = %s', run_section) + self.log.debug("run_section = %s", run_section) return run_section diff --git a/dev_tools/docker_dev/gdev/cmd/gen/apt/push.py b/dev_tools/docker_dev/gdev/cmd/gen/apt/push.py index 441f33818378..d82b5bedac3c 100644 --- a/dev_tools/docker_dev/gdev/cmd/gen/apt/push.py +++ b/dev_tools/docker_dev/gdev/cmd/gen/apt/push.py @@ -8,8 +8,9 @@ """ Module to satisfy the push requirements for the APT section. """ -from .build import GenAptBuild -from .._abc.push import GenAbcPush + +from gdev.cmd.gen.apt.build import GenAptBuild +from gdev.cmd.gen._abc.push import GenAbcPush class GenAptPush(GenAbcPush): diff --git a/dev_tools/docker_dev/gdev/cmd/gen/apt/run.py b/dev_tools/docker_dev/gdev/cmd/gen/apt/run.py index 1b0ce094374d..9682922aaa9b 100644 --- a/dev_tools/docker_dev/gdev/cmd/gen/apt/run.py +++ b/dev_tools/docker_dev/gdev/cmd/gen/apt/run.py @@ -9,8 +9,8 @@ Module to satisfy the run requirements for the APT section. """ -from .build import GenAptBuild -from .._abc.run import GenAbcRun +from gdev.cmd.gen.apt.build import GenAptBuild +from gdev.cmd.gen._abc.run import GenAbcRun class GenAptRun(GenAbcRun): diff --git a/dev_tools/docker_dev/gdev/cmd/gen/env/build.py b/dev_tools/docker_dev/gdev/cmd/gen/env/build.py index da27247e295f..01b7c911758f 100644 --- a/dev_tools/docker_dev/gdev/cmd/gen/env/build.py +++ b/dev_tools/docker_dev/gdev/cmd/gen/env/build.py @@ -8,8 +8,9 @@ """ Module to satisfy the build requirements to generate the dockerfile for the ENV section. """ -from .dockerfile import GenEnvDockerfile -from .._abc.build import GenAbcBuild + +from gdev.cmd.gen.env.dockerfile import GenEnvDockerfile +from gdev.cmd.gen._abc.build import GenAbcBuild class GenEnvBuild(GenAbcBuild): diff --git a/dev_tools/docker_dev/gdev/cmd/gen/env/cfg.py b/dev_tools/docker_dev/gdev/cmd/gen/env/cfg.py index e38abfe1e065..452940f5b8d0 100644 --- a/dev_tools/docker_dev/gdev/cmd/gen/env/cfg.py +++ b/dev_tools/docker_dev/gdev/cmd/gen/env/cfg.py @@ -8,7 +8,8 @@ """ Module to provide a subclass of the GenAbcCfg class for the Env section. 
""" -from .._abc.cfg import GenAbcCfg + +from gdev.cmd.gen._abc.cfg import GenAbcCfg class GenEnvCfg(GenAbcCfg): diff --git a/dev_tools/docker_dev/gdev/cmd/gen/env/dockerfile.py b/dev_tools/docker_dev/gdev/cmd/gen/env/dockerfile.py index 21dc4be31b2b..ba01f2292fcd 100644 --- a/dev_tools/docker_dev/gdev/cmd/gen/env/dockerfile.py +++ b/dev_tools/docker_dev/gdev/cmd/gen/env/dockerfile.py @@ -14,7 +14,7 @@ from gdev.third_party.atools import memoize from gdev.cmd.gen._abc.dockerfile import GenAbcDockerfile -from .cfg import GenEnvCfg +from gdev.cmd.gen.env.cfg import GenEnvCfg class GenEnvDockerfile(GenAbcDockerfile): @@ -41,10 +41,13 @@ def get_input_dockerfiles(self) -> Iterable[GenAbcDockerfile]: input_dockerfiles = [] for section_line in GenGaiaDockerfile(self.options).cfg.get_section_lines(): - input_dockerfiles.append(GenEnvDockerfile(replace(self.options, target=section_line))) + input_dockerfiles.append( + GenEnvDockerfile(replace(self.options, target=section_line)) + ) input_dockerfiles = tuple(input_dockerfiles) - self.log.debug('input_dockerfiles = %s', input_dockerfiles) + self.log.debug("input_dockerfiles = %s", input_dockerfiles) return input_dockerfiles + # pylint: enable=import-outside-toplevel diff --git a/dev_tools/docker_dev/gdev/cmd/gen/env/push.py b/dev_tools/docker_dev/gdev/cmd/gen/env/push.py index 4fa9125a8920..8afeafc40cc4 100644 --- a/dev_tools/docker_dev/gdev/cmd/gen/env/push.py +++ b/dev_tools/docker_dev/gdev/cmd/gen/env/push.py @@ -8,8 +8,9 @@ """ Module to satisfy the push requirements for the ENV section. """ -from .build import GenEnvBuild -from .._abc.push import GenAbcPush + +from gdev.cmd.gen.env.build import GenEnvBuild +from gdev.cmd.gen._abc.push import GenAbcPush class GenEnvPush(GenAbcPush): diff --git a/dev_tools/docker_dev/gdev/cmd/gen/env/run.py b/dev_tools/docker_dev/gdev/cmd/gen/env/run.py index f1dab9bf9565..9452a1dbdba2 100644 --- a/dev_tools/docker_dev/gdev/cmd/gen/env/run.py +++ b/dev_tools/docker_dev/gdev/cmd/gen/env/run.py @@ -8,8 +8,9 @@ """ Module to satisfy the run requirements for the ENV section. """ -from .build import GenEnvBuild -from .._abc.run import GenAbcRun + +from gdev.cmd.gen.env.build import GenEnvBuild +from gdev.cmd.gen._abc.run import GenAbcRun class GenEnvRun(GenAbcRun): diff --git a/dev_tools/docker_dev/gdev/cmd/gen/gaia/build.py b/dev_tools/docker_dev/gdev/cmd/gen/gaia/build.py index f48ff9859f5d..2b75f7bf9761 100644 --- a/dev_tools/docker_dev/gdev/cmd/gen/gaia/build.py +++ b/dev_tools/docker_dev/gdev/cmd/gen/gaia/build.py @@ -8,8 +8,9 @@ """ Module to satisfy the build requirements to generate the dockerfile for the GAIA section. """ -from .dockerfile import GenGaiaDockerfile -from .._abc.build import GenAbcBuild + +from gdev.cmd.gen.gaia.dockerfile import GenGaiaDockerfile +from gdev.cmd.gen._abc.build import GenAbcBuild class GenGaiaBuild(GenAbcBuild): diff --git a/dev_tools/docker_dev/gdev/cmd/gen/gaia/cfg.py b/dev_tools/docker_dev/gdev/cmd/gen/gaia/cfg.py index 55e704d7b376..89dbec84bce7 100644 --- a/dev_tools/docker_dev/gdev/cmd/gen/gaia/cfg.py +++ b/dev_tools/docker_dev/gdev/cmd/gen/gaia/cfg.py @@ -8,7 +8,8 @@ """ Module to provide a subclass of the GenAbcCfg class for the Gaia section. 
""" -from .._abc.cfg import GenAbcCfg + +from gdev.cmd.gen._abc.cfg import GenAbcCfg class GenGaiaCfg(GenAbcCfg): diff --git a/dev_tools/docker_dev/gdev/cmd/gen/gaia/dockerfile.py b/dev_tools/docker_dev/gdev/cmd/gen/gaia/dockerfile.py index 46a76052e9f5..f83bc4d3c9ce 100644 --- a/dev_tools/docker_dev/gdev/cmd/gen/gaia/dockerfile.py +++ b/dev_tools/docker_dev/gdev/cmd/gen/gaia/dockerfile.py @@ -14,7 +14,7 @@ from gdev.third_party.atools import memoize from gdev.cmd.gen._abc.dockerfile import GenAbcDockerfile -from .cfg import GenGaiaCfg +from gdev.cmd.gen.gaia.cfg import GenGaiaCfg class GenGaiaDockerfile(GenAbcDockerfile): @@ -41,10 +41,13 @@ def get_input_dockerfiles(self) -> Iterable[GenAbcDockerfile]: input_dockerfiles = [] for section_line in self.cfg.get_section_lines(): - input_dockerfiles.append(GenRunDockerfile(replace(self.options, target=section_line))) + input_dockerfiles.append( + GenRunDockerfile(replace(self.options, target=section_line)) + ) input_dockerfiles = tuple(input_dockerfiles) - self.log.debug('input_dockerfiles = %s', input_dockerfiles) + self.log.debug("input_dockerfiles = %s", input_dockerfiles) return input_dockerfiles + # pylint: enable=import-outside-toplevel diff --git a/dev_tools/docker_dev/gdev/cmd/gen/gaia/push.py b/dev_tools/docker_dev/gdev/cmd/gen/gaia/push.py index e15e13315934..71f60396637d 100644 --- a/dev_tools/docker_dev/gdev/cmd/gen/gaia/push.py +++ b/dev_tools/docker_dev/gdev/cmd/gen/gaia/push.py @@ -8,8 +8,9 @@ """ Module to satisfy the push requirements for the GAIA section. """ -from .build import GenGaiaBuild -from .._abc.push import GenAbcPush + +from gdev.cmd.gen.gaia.build import GenGaiaBuild +from gdev.cmd.gen._abc.push import GenAbcPush class GenGaiaPush(GenAbcPush): diff --git a/dev_tools/docker_dev/gdev/cmd/gen/gaia/run.py b/dev_tools/docker_dev/gdev/cmd/gen/gaia/run.py index ba0f3de91322..62f5b046d874 100644 --- a/dev_tools/docker_dev/gdev/cmd/gen/gaia/run.py +++ b/dev_tools/docker_dev/gdev/cmd/gen/gaia/run.py @@ -8,8 +8,9 @@ """ Module to satisfy the run requirements for the GAIA section. """ -from .build import GenGaiaBuild -from .._abc.run import GenAbcRun + +from gdev.cmd.gen.gaia.build import GenGaiaBuild +from gdev.cmd.gen._abc.run import GenAbcRun class GenGaiaRun(GenAbcRun): diff --git a/dev_tools/docker_dev/gdev/cmd/gen/git/build.py b/dev_tools/docker_dev/gdev/cmd/gen/git/build.py index 7e2a0c26fd90..e27dfe2b30f9 100644 --- a/dev_tools/docker_dev/gdev/cmd/gen/git/build.py +++ b/dev_tools/docker_dev/gdev/cmd/gen/git/build.py @@ -8,8 +8,9 @@ """ Module to satisfy the build requirements to generate the dockerfile for the GIT section. """ -from .dockerfile import GenGitDockerfile -from .._abc.build import GenAbcBuild + +from gdev.cmd.gen.git.dockerfile import GenGitDockerfile +from gdev.cmd.gen._abc.build import GenAbcBuild class GenGitBuild(GenAbcBuild): diff --git a/dev_tools/docker_dev/gdev/cmd/gen/git/cfg.py b/dev_tools/docker_dev/gdev/cmd/gen/git/cfg.py index 942c71d4b7c7..0f6f8610b8a4 100644 --- a/dev_tools/docker_dev/gdev/cmd/gen/git/cfg.py +++ b/dev_tools/docker_dev/gdev/cmd/gen/git/cfg.py @@ -8,7 +8,8 @@ """ Module to provide a subclass of the GenAbcCfg class for the Git section. 
""" -from .._abc.cfg import GenAbcCfg + +from gdev.cmd.gen._abc.cfg import GenAbcCfg class GenGitCfg(GenAbcCfg): diff --git a/dev_tools/docker_dev/gdev/cmd/gen/git/dockerfile.py b/dev_tools/docker_dev/gdev/cmd/gen/git/dockerfile.py index 16b08eb2a8e0..0427985ffd39 100644 --- a/dev_tools/docker_dev/gdev/cmd/gen/git/dockerfile.py +++ b/dev_tools/docker_dev/gdev/cmd/gen/git/dockerfile.py @@ -11,7 +11,7 @@ from gdev.third_party.atools import memoize from gdev.cmd.gen._abc.dockerfile import GenAbcDockerfile -from .cfg import GenGitCfg +from gdev.cmd.gen.git.cfg import GenGitCfg class GenGitDockerfile(GenAbcDockerfile): @@ -31,9 +31,9 @@ def get_from_section(self) -> str: """ Return text for the FROM line of the final build stage. """ - from_section = f'FROM git_base AS {self.get_name()}' + from_section = f"FROM git_base AS {self.get_name()}" - self.log.debug('from_section = %s', from_section) + self.log.debug("from_section = %s", from_section) return from_section @@ -44,16 +44,19 @@ def get_run_section(self) -> str: """ if section_lines := self.cfg.get_section_lines(): run_section = ( - 'RUN ' - + ' \\\n && '.join( - [f'git clone --depth 1 {section_line}' for section_line in section_lines] - ) - + ' \\\n && rm -rf */.git' - + ' \\\n && apt-get remove --autoremove -y git' + "RUN " + + " \\\n && ".join( + [ + f"git clone --depth 1 {section_line}" + for section_line in section_lines + ] + ) + + " \\\n && rm -rf */.git" + + " \\\n && apt-get remove --autoremove -y git" ) else: - run_section = '' + run_section = "" - self.log.debug('run_section = %s', run_section) + self.log.debug("run_section = %s", run_section) return run_section diff --git a/dev_tools/docker_dev/gdev/cmd/gen/git/push.py b/dev_tools/docker_dev/gdev/cmd/gen/git/push.py index 835a1d64b880..c19b2f112738 100644 --- a/dev_tools/docker_dev/gdev/cmd/gen/git/push.py +++ b/dev_tools/docker_dev/gdev/cmd/gen/git/push.py @@ -8,8 +8,9 @@ """ Module to satisfy the push requirements for the GIT section. """ -from .build import GenGitBuild -from .._abc.push import GenAbcPush + +from gdev.cmd.gen.git.build import GenGitBuild +from gdev.cmd.gen._abc.push import GenAbcPush class GenGitPush(GenAbcPush): diff --git a/dev_tools/docker_dev/gdev/cmd/gen/pip/build.py b/dev_tools/docker_dev/gdev/cmd/gen/pip/build.py index aaf1e2098477..faa916a4ff17 100644 --- a/dev_tools/docker_dev/gdev/cmd/gen/pip/build.py +++ b/dev_tools/docker_dev/gdev/cmd/gen/pip/build.py @@ -8,8 +8,9 @@ """ Module to satisfy the build requirements to generate the dockerfile for the PIP section. """ -from .dockerfile import GenPipDockerfile -from .._abc.build import GenAbcBuild + +from gdev.cmd.gen.pip.dockerfile import GenPipDockerfile +from gdev.cmd.gen._abc.build import GenAbcBuild class GenPipBuild(GenAbcBuild): diff --git a/dev_tools/docker_dev/gdev/cmd/gen/pip/cfg.py b/dev_tools/docker_dev/gdev/cmd/gen/pip/cfg.py index dc073770ebf8..11a7a392c248 100644 --- a/dev_tools/docker_dev/gdev/cmd/gen/pip/cfg.py +++ b/dev_tools/docker_dev/gdev/cmd/gen/pip/cfg.py @@ -8,7 +8,8 @@ """ Module to provide a subclass of the GenAbcCfg class for the Pip section. 
""" -from .._abc.cfg import GenAbcCfg + +from gdev.cmd.gen._abc.cfg import GenAbcCfg class GenPipCfg(GenAbcCfg): diff --git a/dev_tools/docker_dev/gdev/cmd/gen/pip/dockerfile.py b/dev_tools/docker_dev/gdev/cmd/gen/pip/dockerfile.py index 09504f30d51d..20ef39022ac3 100644 --- a/dev_tools/docker_dev/gdev/cmd/gen/pip/dockerfile.py +++ b/dev_tools/docker_dev/gdev/cmd/gen/pip/dockerfile.py @@ -11,7 +11,7 @@ from gdev.third_party.atools import memoize from gdev.cmd.gen._abc.dockerfile import GenAbcDockerfile -from .cfg import GenPipCfg +from gdev.cmd.gen.pip.cfg import GenPipCfg class GenPipDockerfile(GenAbcDockerfile): @@ -31,9 +31,9 @@ def get_from_section(self) -> str: """ Return text for the FROM line of the final build stage. """ - from_section = f'FROM pip_base AS {self.get_name()}' + from_section = f"FROM pip_base AS {self.get_name()}" - self.log.debug('from_section = %s', from_section) + self.log.debug("from_section = %s", from_section) return from_section @@ -44,13 +44,13 @@ def get_run_section(self) -> str: """ if section_lines := self.cfg.get_section_lines(): run_section = ( - 'RUN python3 -m pip install ' - + ' \\\n '.join(section_lines) - + ' \\\n && apt-get remove --autoremove -y python3-pip' + "RUN python3 -m pip install " + + " \\\n ".join(section_lines) + + " \\\n && apt-get remove --autoremove -y python3-pip" ) else: - run_section = '' + run_section = "" - self.log.debug('run_section = %s', run_section) + self.log.debug("run_section = %s", run_section) return run_section diff --git a/dev_tools/docker_dev/gdev/cmd/gen/pip/push.py b/dev_tools/docker_dev/gdev/cmd/gen/pip/push.py index 75ef6d2fae93..5870f54b0740 100644 --- a/dev_tools/docker_dev/gdev/cmd/gen/pip/push.py +++ b/dev_tools/docker_dev/gdev/cmd/gen/pip/push.py @@ -8,8 +8,9 @@ """ Module to satisfy the push requirements for the PIP section. """ -from .build import GenPipBuild -from .._abc.push import GenAbcPush + +from gdev.cmd.gen.pip.build import GenPipBuild +from gdev.cmd.gen._abc.push import GenAbcPush class GenPipPush(GenAbcPush): diff --git a/dev_tools/docker_dev/gdev/cmd/gen/pip/run.py b/dev_tools/docker_dev/gdev/cmd/gen/pip/run.py index 1f07499a7a4b..39e4b69369a6 100644 --- a/dev_tools/docker_dev/gdev/cmd/gen/pip/run.py +++ b/dev_tools/docker_dev/gdev/cmd/gen/pip/run.py @@ -8,8 +8,9 @@ """ Module to satisfy the run requirements for the PIP section. """ -from .build import GenPipBuild -from .._abc.run import GenAbcRun + +from gdev.cmd.gen.pip.build import GenPipBuild +from gdev.cmd.gen._abc.run import GenAbcRun class GenPipRun(GenAbcRun): diff --git a/dev_tools/docker_dev/gdev/cmd/gen/pre_run/build.py b/dev_tools/docker_dev/gdev/cmd/gen/pre_run/build.py index c52e9ed9d6e2..8c246842aafc 100644 --- a/dev_tools/docker_dev/gdev/cmd/gen/pre_run/build.py +++ b/dev_tools/docker_dev/gdev/cmd/gen/pre_run/build.py @@ -8,8 +8,9 @@ """ Module to satisfy the build requirements to generate the dockerfile for the PRERUN section. """ -from .dockerfile import GenPreRunDockerfile -from .._abc.build import GenAbcBuild + +from gdev.cmd.gen.pre_run.dockerfile import GenPreRunDockerfile +from gdev.cmd.gen._abc.build import GenAbcBuild class GenPreRunBuild(GenAbcBuild): diff --git a/dev_tools/docker_dev/gdev/cmd/gen/pre_run/cfg.py b/dev_tools/docker_dev/gdev/cmd/gen/pre_run/cfg.py index 12fd5dfba9e2..26067b5f9968 100644 --- a/dev_tools/docker_dev/gdev/cmd/gen/pre_run/cfg.py +++ b/dev_tools/docker_dev/gdev/cmd/gen/pre_run/cfg.py @@ -8,7 +8,8 @@ """ Module to provide a subclass of the GenAbcCfg class for the PreRun section. 
""" -from .._abc.cfg import GenAbcCfg + +from gdev.cmd.gen._abc.cfg import GenAbcCfg class GenPreRunCfg(GenAbcCfg): diff --git a/dev_tools/docker_dev/gdev/cmd/gen/pre_run/dockerfile.py b/dev_tools/docker_dev/gdev/cmd/gen/pre_run/dockerfile.py index f31cba195505..bf457a1838d2 100644 --- a/dev_tools/docker_dev/gdev/cmd/gen/pre_run/dockerfile.py +++ b/dev_tools/docker_dev/gdev/cmd/gen/pre_run/dockerfile.py @@ -8,11 +8,13 @@ """ Module to generate the PRERUN section of the dockerfile. """ + from typing import Iterable from gdev.third_party.atools import memoize from gdev.cmd.gen._abc.dockerfile import GenAbcDockerfile -from .cfg import GenPreRunCfg +from gdev.cmd.gen.pre_run.cfg import GenPreRunCfg + class GenPreRunDockerfile(GenAbcDockerfile): """ @@ -43,18 +45,21 @@ def inner(env_dockerfile: GenEnvDockerfile) -> Iterable[str]: if env_dockerfile not in seen_env_dockerfiles: seen_env_dockerfiles.add(env_dockerfile) for input_env_dockerfile in env_dockerfile.get_input_dockerfiles(): - env_section_parts += inner(GenEnvDockerfile(input_env_dockerfile.options)) + env_section_parts += inner( + GenEnvDockerfile(input_env_dockerfile.options) + ) if section_lines := env_dockerfile.cfg.get_section_lines(): - env_section_parts.append(f'# {env_dockerfile}') + env_section_parts.append(f"# {env_dockerfile}") for line in section_lines: - env_section_parts.append(f'ENV {line}') + env_section_parts.append(f"ENV {line}") return env_section_parts - env_section = '\n'.join(inner(GenEnvDockerfile(self.options))) + env_section = "\n".join(inner(GenEnvDockerfile(self.options))) - self.log.debug('env_section = %s', env_section) + self.log.debug("env_section = %s", env_section) return env_section + # pylint: enable=import-outside-toplevel # pylint: disable=import-outside-toplevel @@ -72,18 +77,21 @@ def get_input_dockerfiles(self) -> Iterable[GenAbcDockerfile]: from gdev.cmd.gen.pip.dockerfile import GenPipDockerfile from gdev.cmd.gen.web.dockerfile import GenWebDockerfile - input_dockerfiles = tuple([ - GenAptDockerfile(self.options), - GenEnvDockerfile(self.options), - GenGaiaDockerfile(self.options), - GenGitDockerfile(self.options), - GenPipDockerfile(self.options), - GenWebDockerfile(self.options), - ]) + input_dockerfiles = tuple( + [ + GenAptDockerfile(self.options), + GenEnvDockerfile(self.options), + GenGaiaDockerfile(self.options), + GenGitDockerfile(self.options), + GenPipDockerfile(self.options), + GenWebDockerfile(self.options), + ] + ) - self.log.debug('input_dockerfiles = %s', input_dockerfiles) + self.log.debug("input_dockerfiles = %s", input_dockerfiles) return input_dockerfiles + # pylint: enable=import-outside-toplevel @memoize @@ -92,13 +100,10 @@ def get_run_section(self) -> str: Return text for the RUN line of the final build stage. """ if section_lines := self.cfg.get_section_lines(): - run_section = ( - 'RUN ' - + ' \\\n && '.join(section_lines) - ) + run_section = "RUN " + " \\\n && ".join(section_lines) else: - run_section = '' + run_section = "" - self.log.debug('run_section = %s', run_section) + self.log.debug("run_section = %s", run_section) return run_section diff --git a/dev_tools/docker_dev/gdev/cmd/gen/pre_run/push.py b/dev_tools/docker_dev/gdev/cmd/gen/pre_run/push.py index 43a6b15ce92e..880dd2ac5c93 100644 --- a/dev_tools/docker_dev/gdev/cmd/gen/pre_run/push.py +++ b/dev_tools/docker_dev/gdev/cmd/gen/pre_run/push.py @@ -8,8 +8,9 @@ """ Module to satisfy the push requirements for the PRERUN section. 
""" -from .build import GenPreRunBuild -from .._abc.push import GenAbcPush + +from gdev.cmd.gen.pre_run.build import GenPreRunBuild +from gdev.cmd.gen._abc.push import GenAbcPush class GenPreRunPush(GenAbcPush): diff --git a/dev_tools/docker_dev/gdev/cmd/gen/pre_run/run.py b/dev_tools/docker_dev/gdev/cmd/gen/pre_run/run.py index 36c13191bb75..a74d80cadde9 100644 --- a/dev_tools/docker_dev/gdev/cmd/gen/pre_run/run.py +++ b/dev_tools/docker_dev/gdev/cmd/gen/pre_run/run.py @@ -8,8 +8,9 @@ """ Module to satisfy the run requirements for the PRERUN section. """ -from .build import GenPreRunBuild -from .._abc.run import GenAbcRun + +from gdev.cmd.gen.pre_run.build import GenPreRunBuild +from gdev.cmd.gen._abc.run import GenAbcRun class GenPreRunRun(GenAbcRun): diff --git a/dev_tools/docker_dev/gdev/cmd/gen/run/build.py b/dev_tools/docker_dev/gdev/cmd/gen/run/build.py index dc057dc62c26..a0e4f9c7f655 100644 --- a/dev_tools/docker_dev/gdev/cmd/gen/run/build.py +++ b/dev_tools/docker_dev/gdev/cmd/gen/run/build.py @@ -8,8 +8,9 @@ """ Module to satisfy the build requirements to generate the dockerfile for the RUN section. """ -from .dockerfile import GenRunDockerfile -from .._abc.build import GenAbcBuild + +from gdev.cmd.gen.run.dockerfile import GenRunDockerfile +from gdev.cmd.gen._abc.build import GenAbcBuild class GenRunBuild(GenAbcBuild): diff --git a/dev_tools/docker_dev/gdev/cmd/gen/run/cfg.py b/dev_tools/docker_dev/gdev/cmd/gen/run/cfg.py index 031b3a505435..f79818c904bf 100644 --- a/dev_tools/docker_dev/gdev/cmd/gen/run/cfg.py +++ b/dev_tools/docker_dev/gdev/cmd/gen/run/cfg.py @@ -8,7 +8,8 @@ """ Module to provide a subclass of the GenAbcCfg class for the Run section. """ -from .._abc.cfg import GenAbcCfg + +from gdev.cmd.gen._abc.cfg import GenAbcCfg class GenRunCfg(GenAbcCfg): diff --git a/dev_tools/docker_dev/gdev/cmd/gen/run/dockerfile.py b/dev_tools/docker_dev/gdev/cmd/gen/run/dockerfile.py index 592f6c75b0e6..3edb49796871 100644 --- a/dev_tools/docker_dev/gdev/cmd/gen/run/dockerfile.py +++ b/dev_tools/docker_dev/gdev/cmd/gen/run/dockerfile.py @@ -9,11 +9,13 @@ Module to execute the steps necessary to generate a dockerfile based on the configuration. """ + from typing import Iterable from gdev.third_party.atools import memoize from gdev.cmd.gen._abc.dockerfile import GenAbcDockerfile -from .cfg import GenRunCfg +from gdev.cmd.gen.run.cfg import GenRunCfg + class GenRunDockerfile(GenAbcDockerfile): """ @@ -40,9 +42,10 @@ def get_env_section(self) -> str: env_section = GenPreRunDockerfile(self.options).get_env_section() - self.log.debug('env_section = %s', env_section) + self.log.debug("env_section = %s", env_section) return env_section + # pylint: enable=import-outside-toplevel # pylint: disable=import-outside-toplevel @@ -57,9 +60,10 @@ def get_input_dockerfiles(self) -> Iterable[GenAbcDockerfile]: input_dockerfiles = tuple([GenPreRunDockerfile(self.options)]) - self.log.debug('input_dockerfiles = %s', input_dockerfiles) + self.log.debug("input_dockerfiles = %s", input_dockerfiles) return input_dockerfiles + # pylint: enable=import-outside-toplevel @memoize @@ -68,13 +72,10 @@ def get_run_section(self) -> str: Return text for the RUN line of the final build stage. 
""" if section_lines := self.cfg.get_section_lines(): - run_section = ( - 'RUN ' - + ' \\\n && '.join(section_lines) - ) + run_section = "RUN " + " \\\n && ".join(section_lines) else: - run_section = '' + run_section = "" - self.log.debug('run_section = %s', run_section) + self.log.debug("run_section = %s", run_section) return run_section diff --git a/dev_tools/docker_dev/gdev/cmd/gen/run/push.py b/dev_tools/docker_dev/gdev/cmd/gen/run/push.py index c26143e51dd9..71dfab58981c 100644 --- a/dev_tools/docker_dev/gdev/cmd/gen/run/push.py +++ b/dev_tools/docker_dev/gdev/cmd/gen/run/push.py @@ -8,8 +8,9 @@ """ Module to satisfy the push requirements for the RUN section. """ -from .build import GenRunBuild -from .._abc.push import GenAbcPush + +from gdev.cmd.gen.run.build import GenRunBuild +from gdev.cmd.gen._abc.push import GenAbcPush class GenRunPush(GenAbcPush): diff --git a/dev_tools/docker_dev/gdev/cmd/gen/run/run.py b/dev_tools/docker_dev/gdev/cmd/gen/run/run.py index aca1656cbd8f..87dcaf8d12ca 100644 --- a/dev_tools/docker_dev/gdev/cmd/gen/run/run.py +++ b/dev_tools/docker_dev/gdev/cmd/gen/run/run.py @@ -8,8 +8,9 @@ """ Module to satisfy the run requirements for the RUN section. """ -from .build import GenRunBuild -from .._abc.run import GenAbcRun + +from gdev.cmd.gen.run.build import GenRunBuild +from gdev.cmd.gen._abc.run import GenAbcRun class GenRunRun(GenAbcRun): diff --git a/dev_tools/docker_dev/gdev/cmd/gen/web/build.py b/dev_tools/docker_dev/gdev/cmd/gen/web/build.py index 1650e0701609..f7608470b9fa 100644 --- a/dev_tools/docker_dev/gdev/cmd/gen/web/build.py +++ b/dev_tools/docker_dev/gdev/cmd/gen/web/build.py @@ -8,8 +8,9 @@ """ Module to satisfy the build requirements to generate the dockerfile for the WEB section. """ -from .dockerfile import GenWebDockerfile -from .._abc.build import GenAbcBuild + +from gdev.cmd.gen.web.dockerfile import GenWebDockerfile +from gdev.cmd.gen._abc.build import GenAbcBuild class GenWebBuild(GenAbcBuild): diff --git a/dev_tools/docker_dev/gdev/cmd/gen/web/cfg.py b/dev_tools/docker_dev/gdev/cmd/gen/web/cfg.py index e1209c874bd1..b898b3357916 100644 --- a/dev_tools/docker_dev/gdev/cmd/gen/web/cfg.py +++ b/dev_tools/docker_dev/gdev/cmd/gen/web/cfg.py @@ -8,7 +8,8 @@ """ Module to provide a subclass of the GenAbcCfg class for the Web section. """ -from .._abc.cfg import GenAbcCfg + +from gdev.cmd.gen._abc.cfg import GenAbcCfg class GenWebCfg(GenAbcCfg): diff --git a/dev_tools/docker_dev/gdev/cmd/gen/web/dockerfile.py b/dev_tools/docker_dev/gdev/cmd/gen/web/dockerfile.py index f045d6d2887b..cf56abf4a446 100644 --- a/dev_tools/docker_dev/gdev/cmd/gen/web/dockerfile.py +++ b/dev_tools/docker_dev/gdev/cmd/gen/web/dockerfile.py @@ -8,9 +8,11 @@ """ Module to generate the WEB section of the dockerfile. """ + from gdev.third_party.atools import memoize from gdev.cmd.gen._abc.dockerfile import GenAbcDockerfile -from .cfg import GenWebCfg +from gdev.cmd.gen.web.cfg import GenWebCfg + class GenWebDockerfile(GenAbcDockerfile): """ @@ -29,9 +31,9 @@ def get_from_section(self) -> str: """ Return text for the FROM line of the final build stage. 
""" - from_section = f'FROM web_base AS {self.get_name()}' + from_section = f"FROM web_base AS {self.get_name()}" - self.log.debug('from_section = %s', from_section) + self.log.debug("from_section = %s", from_section) return from_section @@ -42,15 +44,15 @@ def get_run_section(self) -> str: """ if section_lines := self.cfg.get_section_lines(): - formatted_section_lines = ' \\\n '.join(section_lines) + formatted_section_lines = " \\\n ".join(section_lines) run_statement = ( - 'RUN wget ' + "RUN wget " + formatted_section_lines - + ' \\\n && apt-get remove --autoremove -y wget' + + " \\\n && apt-get remove --autoremove -y wget" ) else: - run_statement = '' + run_statement = "" - self.log.debug('run_statement = %s', run_statement) + self.log.debug("run_statement = %s", run_statement) return run_statement diff --git a/dev_tools/docker_dev/gdev/cmd/gen/web/push.py b/dev_tools/docker_dev/gdev/cmd/gen/web/push.py index da0a8b0ecd87..27aea6e3df22 100644 --- a/dev_tools/docker_dev/gdev/cmd/gen/web/push.py +++ b/dev_tools/docker_dev/gdev/cmd/gen/web/push.py @@ -8,8 +8,9 @@ """ Module to satisfy the push requirements for the WEB section. """ -from .build import GenWebBuild -from .._abc.push import GenAbcPush + +from gdev.cmd.gen.web.build import GenWebBuild +from gdev.cmd.gen._abc.push import GenAbcPush class GenWebPush(GenAbcPush): diff --git a/dev_tools/docker_dev/gdev/cmd/gen/web/run.py b/dev_tools/docker_dev/gdev/cmd/gen/web/run.py index 4f0c63c8d2c8..c9b48dd43c79 100644 --- a/dev_tools/docker_dev/gdev/cmd/gen/web/run.py +++ b/dev_tools/docker_dev/gdev/cmd/gen/web/run.py @@ -8,8 +8,9 @@ """ Module to satisfy the run requirements for the WEB section. """ -from .build import GenWebBuild -from .._abc.run import GenAbcRun + +from gdev.cmd.gen.web.build import GenWebBuild +from gdev.cmd.gen._abc.run import GenAbcRun class GenWebRun(GenAbcRun): diff --git a/dev_tools/docker_dev/gdev/cmd/push.py b/dev_tools/docker_dev/gdev/cmd/push.py index 6385076a770a..2f8f491d5c66 100644 --- a/dev_tools/docker_dev/gdev/cmd/push.py +++ b/dev_tools/docker_dev/gdev/cmd/push.py @@ -8,9 +8,10 @@ """ Module to provide for the `push` subcommand entry point. """ + from gdev.dependency import Dependency from gdev.third_party.atools import memoize -from .gen.run.push import GenRunPush +from gdev.cmd.gen.run.push import GenRunPush class Push(Dependency): diff --git a/dev_tools/docker_dev/gdev/cmd/run.py b/dev_tools/docker_dev/gdev/cmd/run.py index cf03a0471f6a..cb24734cc1f9 100644 --- a/dev_tools/docker_dev/gdev/cmd/run.py +++ b/dev_tools/docker_dev/gdev/cmd/run.py @@ -11,7 +11,7 @@ from gdev.dependency import Dependency from gdev.third_party.atools import memoize -from .gen.run.run import GenRunRun +from gdev.cmd.gen.run.run import GenRunRun class Run(Dependency): diff --git a/dev_tools/docker_dev/gdev/custom/gaia_path.py b/dev_tools/docker_dev/gdev/custom/gaia_path.py index 071c72828a33..3f9621880d8d 100644 --- a/dev_tools/docker_dev/gdev/custom/gaia_path.py +++ b/dev_tools/docker_dev/gdev/custom/gaia_path.py @@ -25,6 +25,7 @@ class GaiaPath(PosixPath): """ Class to provide various pathing utilities to make things simpler. """ + _repo: GaiaPath = None def context(self) -> GaiaPath: @@ -52,7 +53,7 @@ def mixin(cls) -> GaiaPath: """ Determine the path to where the mixins are stored. 
""" - return cls.repo() / 'dev_tools' / 'gdev' / 'mixin' + return cls.repo() / "dev_tools" / "gdev" / "mixin" @classmethod def repo(cls) -> GaiaPath: @@ -62,35 +63,37 @@ def repo(cls) -> GaiaPath: if cls._repo is None: repo = GaiaPath( check_output( - 'git rev-parse --show-toplevel'.split(), - cwd=f'{GaiaPath(__file__).parent}' - ).decode().strip() + "git rev-parse --show-toplevel".split(), + cwd=f"{GaiaPath(__file__).parent}", + ) + .decode() + .strip() ) - log.debug('repo = %s', repo) + log.debug("repo = %s", repo) cls._repo = repo return cls._repo @classmethod - def build(cls, path: Union[GaiaPath, str] = '/') -> GaiaPath: + def build(cls, path: Union[GaiaPath, str] = "/") -> GaiaPath: """ Compute a path along the "/build" tree in the container. """ - return GaiaPath('/build') / path + return GaiaPath("/build") / path @classmethod - def source(cls, path: Union[GaiaPath, str] = '/') -> GaiaPath: + def source(cls, path: Union[GaiaPath, str] = "/") -> GaiaPath: """ Compute a path along the "/source" tree in the container. """ - return GaiaPath('/source') / path + return GaiaPath("/source") / path def write_text(self, data: str, encoding=None, errors=None) -> None: """ Write the specified text to the given file. """ - log.debug('Writing to %s, text: \n%s', self, indent(data, " ")) + log.debug("Writing to %s, text: \n%s", self, indent(data, " ")) self.parent.mkdir(parents=True, exist_ok=True) super().write_text(data, encoding=encoding, errors=errors) diff --git a/dev_tools/docker_dev/gdev/dependency.py b/dev_tools/docker_dev/gdev/dependency.py index 0cd518b45c81..58cf7f9c0121 100644 --- a/dev_tools/docker_dev/gdev/dependency.py +++ b/dev_tools/docker_dev/gdev/dependency.py @@ -30,6 +30,7 @@ from gdev.parser_structure import ParserStructure from gdev.host import Host + @dataclass(frozen=True) class Dependency: """ @@ -41,13 +42,7 @@ class Dependency: options: Options - __LOG_LEVELS = [ - "CRITICAL", - "ERROR", - "WARNING", - "INFO", - "DEBUG"] - + __LOG_LEVELS = ["CRITICAL", "ERROR", "WARNING", "INFO", "DEBUG"] # These two classes are only present to handle the Abort exception, # and early exit in some cases. @@ -60,14 +55,15 @@ class Abort(Exception): """ Exception to know we aborted. """ + def __str__(self) -> str: - return f'Abort: {super().__str__()}' + return f"Abort: {super().__str__()}" def __hash__(self) -> int: return hash((type(self), self.options)) def __repr__(self) -> str: - return f'{type(self).__name__}({self.options.target})' + return f"{type(self).__name__}({self.options.target})" def __str__(self) -> str: return repr(self) @@ -81,16 +77,16 @@ def log(self) -> logging.Logger: Note that the @memoize decorator means that this will only be setup once for any given instance of a class. """ - log = logging.getLogger(f'{self.__module__} ({self.options.target})') + log = logging.getLogger(f"{self.__module__} ({self.options.target})") log.setLevel(self.options.log_level) log.propagate = False handler = logging.StreamHandler(sys.stderr) handler.setLevel(self.options.log_level) if handler.level > logging.DEBUG: - formatter = logging.Formatter(f'({self.options.target}) %(message)s') + formatter = logging.Formatter(f"({self.options.target}) %(message)s") else: - formatter = logging.Formatter('%(levelname)s:%(name)s %(message)s') + formatter = logging.Formatter("%(levelname)s:%(name)s %(message)s") handler.setFormatter(formatter) log.addHandler(handler) return log @@ -115,6 +111,7 @@ def get_parser() -> ArgumentParser: """ Calculate the arguments to show for the given subcommand. 
""" + def add_flags(parser: ArgumentParser) -> None: parser.add_argument( @@ -125,11 +122,11 @@ def add_flags(parser: ArgumentParser) -> None: ) # Dockerfile, and above - base_image_default = 'ubuntu:20.04' + base_image_default = "ubuntu:20.04" parser.add_argument( - '--base-image', + "--base-image", default=base_image_default, - help=f'Base image for build. Default: "{base_image_default}"' + help=f'Base image for build. Default: "{base_image_default}"', ) # Cfg, and above. @@ -139,101 +136,107 @@ def add_flags(parser: ArgumentParser) -> None: # NOTE: Cfg does not warn if provided --cfg-enable is not present in any gdev.cfg files cfg_enables_default = [] parser.add_argument( - '--cfg-enables', + "--cfg-enables", default=cfg_enables_default, - nargs='*', + nargs="*", help=( - f'Enable lines in gdev.cfg files gated by `enable_if`, `enable_if_any`, and' + f"Enable lines in gdev.cfg files gated by `enable_if`, `enable_if_any`, and" f' `enable_if_all` functions. Default: "{cfg_enables_default}"' - ) + ), ) # Only used when GDev is being set up. - log_level_default = 'INFO' + log_level_default = "INFO" parser.add_argument( - '--log-level', + "--log-level", default=log_level_default, choices=Dependency.__LOG_LEVELS, - help=f'Log level. Default: "{log_level_default}"' + help=f'Log level. Default: "{log_level_default}"', ) # Only used as part of "run" to force a build. parser.add_argument( - '-f', '--force', - action='store_true', - help='Force Docker to build with local changes.' + "-f", + "--force", + action="store_true", + help="Force Docker to build with local changes.", ) # Dockerfile, and above mixins_default = [] parser.add_argument( - '--mixins', + "--mixins", default=mixins_default, - nargs='*', - choices=sorted([ - directory.name - for directory in GaiaPath.mixin().iterdir() - if directory.is_dir() - ]), + nargs="*", + choices=sorted( + [ + directory.name + for directory in GaiaPath.mixin().iterdir() + if directory.is_dir() + ] + ), help=( - f'Image mixins to use when creating a container. Mixins provide dev tools and' - f' configuration from targets in ' + f"Image mixins to use when creating a container. Mixins provide dev tools and" + f" configuration from targets in " f'the "{GaiaPath.mixin().relative_to(GaiaPath.repo())}"' f' directory. Default: "{mixins_default}"' - ) + ), ) mounts_default = [] parser.add_argument( - '--mounts', + "--mounts", default=[], nargs=1, help=( - f': mounts to be created (or if already created,' - f' resumed) during `docker run`. Paths may be specified as relative paths.' - f' relative paths are relative to the host\'s current working' - f' directory. relative paths are relative to the Docker' - f' container\'s WORKDIR (AKA the build dir). Default:' + f": mounts to be created (or if already created," + f" resumed) during `docker run`. Paths may be specified as relative paths." + f" relative paths are relative to the host's current working" + f" directory. relative paths are relative to the Docker" + f" container's WORKDIR (AKA the build dir). Default:" f' "{" ".join(mounts_default)}"' - ) + ), ) # Build + platform_default = { - 'x86_64': 'amd64', - 'aarch64': 'arm64', + "x86_64": "amd64", + "aarch64": "arm64", }[platform.machine()] parser.add_argument( - '--platform', + "--platform", default=platform_default, - choices=['amd64', 'arm64'], - help=f'Platform to build upon. Default: "{platform_default}"' + choices=["amd64", "arm64"], + help=f'Platform to build upon. 
Default: "{platform_default}"', ) ports_default = [] parser.add_argument( - '-p', '--ports', + "-p", + "--ports", default=ports_default, - nargs='*', + nargs="*", type=int, - help=f'Ports to expose in underlying docker container. Default: "{ports_default}"' + help=f'Ports to expose in underlying docker container. Default: "{ports_default}"', ) # Build + registry_default = None parser.add_argument( - '--registry', + "--registry", default=registry_default, help=( - 'Registry to push images and query cached build stages.' - f' Default: {registry_default}' - ) + "Registry to push images and query cached build stages." + f" Default: {registry_default}" + ), ) parser.add_argument( - 'args', + "args", nargs=REMAINDER, - help='Args to be forwarded on to docker run, if applicable.' + help="Args to be forwarded on to docker run, if applicable.", ) - def inner(parser: ArgumentParser, parser_structure: ParserStructure) -> ArgumentParser: + def inner( + parser: ArgumentParser, parser_structure: ParserStructure + ) -> ArgumentParser: if not parser_structure.sub_parser_structures: add_flags(parser) parser.set_defaults( @@ -244,7 +247,9 @@ def inner(parser: ArgumentParser, parser_structure: ParserStructure) -> Argument sub_parsers = parser.add_subparsers() sub_parser_map = {} for sub_parser_structure in parser_structure.sub_parser_structures: - sub_parser_map[sub_parser_structure.command_parts[-1]] = sub_parser_structure + sub_parser_map[ + sub_parser_structure.command_parts[-1] + ] = sub_parser_structure for next_map_key in sorted(sub_parser_map.keys()): sub_parser = sub_parsers.add_parser(next_map_key) inner(sub_parser, sub_parser_map[next_map_key]) @@ -256,7 +261,8 @@ def inner(parser: ArgumentParser, parser_structure: ParserStructure) -> Argument return parser return inner( - ArgumentParser(prog='gdev'), parser_structure=Dependency.get_parser_structure() + ArgumentParser(prog="gdev"), + parser_structure=Dependency.get_parser_structure(), ) @memoize @@ -268,10 +274,10 @@ def run(self) -> None: if isabstract(self): return - if hasattr(self, 'main'): - self.log.debug('Starting %s.main', str(type(self).__name__)) + if hasattr(self, "main"): + self.log.debug("Starting %s.main", str(type(self).__name__)) self.main() - self.log.debug('Finished %s.main', str(type(self).__name__)) + self.log.debug("Finished %s.main", str(type(self).__name__)) # if self.options.log_level == 'DEBUG': # getters = [] @@ -289,42 +295,50 @@ def of_args(args: Sequence[str]) -> Dependency: """ parser = Dependency.get_parser() - autocomplete(parser, default_completer=FilesCompleter(allowednames='', directories=False)) + autocomplete( + parser, default_completer=FilesCompleter(allowednames="", directories=False) + ) parsed_args = parser.parse_args(args).__dict__ if not parsed_args: Dependency.get_parser_structure.memoize.remove() - parser.parse_args([*args, '--help']) + parser.parse_args([*args, "--help"]) sys.exit(1) - if 'dry_dock' in parsed_args: - Host.set_drydock(parsed_args['dry_dock']) - del parsed_args['dry_dock'] - - if parsed_args['args'] and parsed_args['args'][0] == '--': - parsed_args['args'] = parsed_args['args'][1:] - parsed_args['args'] = ' '.join(parsed_args['args']) - parsed_args['cfg_enables'] = frozenset([ - parsed_args['base_image'], *parsed_args['cfg_enables'], *parsed_args['mixins'] - ]) - parsed_args['mixins'] = frozenset(parsed_args['mixins']) - parsed_args['ports'] = frozenset(str(port) for port in parsed_args['ports']) + if "dry_dock" in parsed_args: + Host.set_drydock(parsed_args["dry_dock"]) + del 
parsed_args["dry_dock"] + + if parsed_args["args"] and parsed_args["args"][0] == "--": + parsed_args["args"] = parsed_args["args"][1:] + parsed_args["args"] = " ".join(parsed_args["args"]) + parsed_args["cfg_enables"] = frozenset( + [ + parsed_args["base_image"], + *parsed_args["cfg_enables"], + *parsed_args["mixins"], + ] + ) + parsed_args["mixins"] = frozenset(parsed_args["mixins"]) + parsed_args["ports"] = frozenset(str(port) for port in parsed_args["ports"]) mounts = [] - for mount in parsed_args['mounts']: - host_path, container_path = mount.split(':', 1) + for mount in parsed_args["mounts"]: + host_path, container_path = mount.split(":", 1) host_path, container_path = GaiaPath(host_path), GaiaPath(container_path) if not host_path.is_absolute(): host_path = host_path.absolute() if not container_path.is_absolute(): container_path = container_path.absolute().image_build() mounts.append(Mount(container_path=container_path, host_path=host_path)) - parsed_args['mounts'] = frozenset(mounts) + parsed_args["mounts"] = frozenset(mounts) - command_class = parsed_args.pop('command_class') - command_module = parsed_args.pop('command_module') + command_class = parsed_args.pop("command_class") + command_module = parsed_args.pop("command_module") - options = Options(target=f'{GaiaPath.cwd().relative_to(GaiaPath.repo())}', **parsed_args) + options = Options( + target=f"{GaiaPath.cwd().relative_to(GaiaPath.repo())}", **parsed_args + ) dependency = getattr(import_module(command_module), command_class)(options) diff --git a/dev_tools/docker_dev/gdev/host.py b/dev_tools/docker_dev/gdev/host.py index f3d496b556c6..ae29e1f4b4c3 100644 --- a/dev_tools/docker_dev/gdev/host.py +++ b/dev_tools/docker_dev/gdev/host.py @@ -20,6 +20,7 @@ log = getLogger(__name__) + class Host: """ Class to handle communications with the host system. @@ -30,6 +31,7 @@ class Host: """ Class to handle communications with the host system. 
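
Host.__execute_sync, reformatted below, captures a child process's stdout by wrapping the pipe in a TextIOWrapper and joining the lines. A minimal sketch of that capture path (a short command sidesteps the pipe-buffer caveat of calling wait() before reading):

    import io
    import subprocess

    with subprocess.Popen(["echo", "hello"], stdout=subprocess.PIPE) as proc:
        proc.wait()
        output = "".join(io.TextIOWrapper(proc.stdout, encoding="utf-8"))
    print(tuple(output.strip().splitlines()))  # ('hello',)
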
""" + @staticmethod def set_drydock(value): """ @@ -46,32 +48,43 @@ def is_drydock_enabled(): @staticmethod @memoize - def __execute_sync(command: str, err_ok: bool, - capture_output: bool)-> Optional[Sequence[str]]: - - log.debug('execute_sync err_ok = %s capture_output = %s command = %s', - err_ok, capture_output, command) + def __execute_sync( + command: str, err_ok: bool, capture_output: bool + ) -> Optional[Sequence[str]]: + + log.debug( + "execute_sync err_ok = %s capture_output = %s command = %s", + err_ok, + capture_output, + command, + ) with subprocess.Popen( command.replace(" ", " ").split(" "), stdout=subprocess.PIPE if capture_output else None, - stderr=subprocess.PIPE if capture_output else None) as child_process: + stderr=subprocess.PIPE if capture_output else None, + ) as child_process: process_return_code = child_process.wait() - log.debug('execute: return_code= %s, command= %s', process_return_code, command) + log.debug( + "execute: return_code= %s, command= %s", process_return_code, command + ) if process_return_code == 0 or err_ok: if child_process.stdout is None: return tuple() - process_stdout_output = \ - "".join(io.TextIOWrapper(child_process.stdout, encoding="utf-8")) + process_stdout_output = "".join( + io.TextIOWrapper(child_process.stdout, encoding="utf-8") + ) return tuple(process_stdout_output.strip().splitlines()) if child_process.stdout is not None: - process_stdout_output = \ - "".join(io.TextIOWrapper(child_process.stdout, encoding="utf-8")) + process_stdout_output = "".join( + io.TextIOWrapper(child_process.stdout, encoding="utf-8") + ) print(process_stdout_output, file=sys.stdout) if child_process.stderr is not None: - process_stderr_output = \ - "".join(io.TextIOWrapper(child_process.stderr, encoding="utf-8")) + process_stderr_output = "".join( + io.TextIOWrapper(child_process.stderr, encoding="utf-8") + ) print(process_stderr_output, file=sys.stderr) sys.exit(process_return_code) @@ -86,7 +99,9 @@ def execute_sync(command: str, *, err_ok: bool = False) -> None: Host.__execute_sync(capture_output=False, command=command, err_ok=err_ok) @staticmethod - def execute_and_get_lines_sync(command: str, *, err_ok: bool = False) -> Sequence[str]: + def execute_and_get_lines_sync( + command: str, *, err_ok: bool = False + ) -> Sequence[str]: """ Execute the specified command string and capture the output. """ @@ -100,6 +115,8 @@ def execute_and_get_line_sync(command: str, *, err_ok: bool = False) -> str: if Host.is_drydock_enabled() and command.startswith("docker image inspect"): lines = [""] else: - lines = Host.__execute_sync(capture_output=True, command=command, err_ok=err_ok) - assert len(lines) == 1, f'Must contain one line: {lines = }' + lines = Host.__execute_sync( + capture_output=True, command=command, err_ok=err_ok + ) + assert len(lines) == 1, f"Must contain one line: {lines = }" return lines[0] diff --git a/dev_tools/docker_dev/gdev/main.py b/dev_tools/docker_dev/gdev/main.py index fe7eb0e4acb6..73f75dd008f0 100644 --- a/dev_tools/docker_dev/gdev/main.py +++ b/dev_tools/docker_dev/gdev/main.py @@ -13,11 +13,13 @@ import logging from gdev.dependency import Dependency + # pylint: disable=too-few-public-methods -class DockerDev(): +class DockerDev: """ Class to provide a single entry point from the operating system. 
""" + @staticmethod def main(): """ @@ -30,11 +32,13 @@ def main(): try: dependency.cli_entrypoint() except dependency.Exception as this_exception: - print(f'\n{this_exception}', file=sys.stderr) + print(f"\n{this_exception}", file=sys.stderr) finally: logging.shutdown() return 0 + + # pylint: enable=too-few-public-methods if __name__ == "__main__": diff --git a/dev_tools/docker_dev/gdev/mount.py b/dev_tools/docker_dev/gdev/mount.py index 7c3312bde282..33e742636b9a 100644 --- a/dev_tools/docker_dev/gdev/mount.py +++ b/dev_tools/docker_dev/gdev/mount.py @@ -12,10 +12,12 @@ from dataclasses import dataclass from gdev.custom.gaia_path import GaiaPath + @dataclass(frozen=True) class Mount: """ Class to represent a mount point between the docker container and the host system. """ + container_path: GaiaPath host_path: GaiaPath diff --git a/dev_tools/docker_dev/gdev/options.py b/dev_tools/docker_dev/gdev/options.py index 6697c503ad17..3a255e1b7b18 100644 --- a/dev_tools/docker_dev/gdev/options.py +++ b/dev_tools/docker_dev/gdev/options.py @@ -8,16 +8,19 @@ """ Module to represent the entire collection of options available from the command line. """ + from dataclasses import dataclass from typing import FrozenSet from gdev.mount import Mount + # pylint: disable=too-many-instance-attributes @dataclass(frozen=True) class Options: """ Class to encapsulate the options related to the command line. """ + args: str base_image: str cfg_enables: FrozenSet[str] @@ -29,4 +32,6 @@ class Options: ports: FrozenSet[str] registry: str target: str + + # pylint: enable=too-many-instance-attributes diff --git a/dev_tools/docker_dev/gdev/parser_structure.py b/dev_tools/docker_dev/gdev/parser_structure.py index 30046e1b2c34..ceccd3df5b1c 100644 --- a/dev_tools/docker_dev/gdev/parser_structure.py +++ b/dev_tools/docker_dev/gdev/parser_structure.py @@ -8,6 +8,7 @@ """ Module to provide a description of the structure to be constructed. """ + from __future__ import annotations from typing import FrozenSet, Set, Tuple from importlib import import_module @@ -17,7 +18,6 @@ from pkgutil import iter_modules - @dataclass(frozen=True) class ParserStructure: """ @@ -35,18 +35,20 @@ def get_command_class(self) -> str: """ Name of the class that contains the procedure to enact. """ - return ''.join([ - command_part.capitalize() - for command_part in self.command_parts - for command_part in command_part.split('_') - if command_part - ]) + return "".join( + [ + command_part.capitalize() + for command_part in self.command_parts + for command_part in command_part.split("_") + if command_part + ] + ) def get_command_module(self) -> str: """ Name of the module that contains the procedure to enact. """ - return '.'.join(['gdev.cmd', *self.command_parts]) + return ".".join(["gdev.cmd", *self.command_parts]) @classmethod def of_command_parts(cls, command_parts: Tuple[str, ...]) -> ParserStructure: @@ -54,30 +56,32 @@ def of_command_parts(cls, command_parts: Tuple[str, ...]) -> ParserStructure: Create a parser structure out of the command parts. 
""" - module_name = '.'.join(['gdev.cmd', *command_parts]) + module_name = ".".join(["gdev.cmd", *command_parts]) spec = find_spec(module_name) module = import_module(module_name) if spec.submodule_search_locations is None: - command_class = ''.join([ - command_part.capitalize() - for command_part in command_parts - for command_part in command_part.split('_') - if command_part - ]) - doc = getdoc(module.__dict__[command_class]) or '' + command_class = "".join( + [ + command_part.capitalize() + for command_part in command_parts + for command_part in command_part.split("_") + if command_part + ] + ) + doc = getdoc(module.__dict__[command_class]) or "" parser_structure = ParserStructure(command_parts=command_parts, doc=doc) else: doc = getdoc(module) sub_parser_structures: Set[ParserStructure] = set() for module in iter_modules(spec.submodule_search_locations): - if not (sub_command := module.name).startswith('_'): + if not (sub_command := module.name).startswith("_"): sub_parser_structures.add( cls.of_command_parts(tuple([*command_parts, sub_command])) ) parser_structure = ParserStructure( command_parts=command_parts, doc=doc, - sub_parser_structures=frozenset(sub_parser_structures) + sub_parser_structures=frozenset(sub_parser_structures), ) return parser_structure diff --git a/dev_tools/docker_dev/gdev/third_party/argcomplete.py b/dev_tools/docker_dev/gdev/third_party/argcomplete.py index e3d0aad3c351..bbd81dce8ffe 100644 --- a/dev_tools/docker_dev/gdev/third_party/argcomplete.py +++ b/dev_tools/docker_dev/gdev/third_party/argcomplete.py @@ -16,8 +16,8 @@ except ImportError: from subprocess import check_call as _check_call - _check_call('python3 -m pip install argcomplete'.split()) + _check_call("python3 -m pip install argcomplete".split()) - from argcomplete import autocomplete - from argcomplete.completers import FilesCompleter + from argcomplete import autocomplete # noqa: F401 + from argcomplete.completers import FilesCompleter # noqa: F401 # pylint: enable=import-self, unused-import diff --git a/dev_tools/docker_dev/gdev/third_party/atools.py b/dev_tools/docker_dev/gdev/third_party/atools.py index 29c59e37aa7c..034f391d9ee5 100644 --- a/dev_tools/docker_dev/gdev/third_party/atools.py +++ b/dev_tools/docker_dev/gdev/third_party/atools.py @@ -17,9 +17,9 @@ except ImportError: from subprocess import check_call as _check_call - _check_call('python3 -m pip install atools'.split()) + _check_call("python3 -m pip install atools".split()) from atools import memoize # pylint: enable=import-self, unused-import -memoize_db = memoize(db_path=_Path.home() / '.memoize.gdev') +memoize_db = memoize(db_path=_Path.home() / ".memoize.gdev") diff --git a/dev_tools/docker_dev/test/gdev_execute.py b/dev_tools/docker_dev/test/gdev_execute.py index d04ccfd70d30..ee856da03668 100644 --- a/dev_tools/docker_dev/test/gdev_execute.py +++ b/dev_tools/docker_dev/test/gdev_execute.py @@ -20,16 +20,26 @@ from gdev.__main__ import main from gdev.main import DockerDev + # pylint: disable=too-few-public-methods -class SubprocessExecutor(): +class SubprocessExecutor: """ Class to use a subprocess to execute the tests. 
""" + def __init__(self, script_path=None): - self.__script_path = script_path \ - if script_path else \ - os.path.realpath(os.path.join(determine_repository_base_directory(), \ - "dev_tools", "docker_dev", "gdev.sh")) + self.__script_path = ( + script_path + if script_path + else os.path.realpath( + os.path.join( + determine_repository_base_directory(), + "dev_tools", + "docker_dev", + "gdev.sh", + ) + ) + ) def invoke_main(self, arguments=None, cwd=None): """ @@ -40,18 +50,29 @@ def invoke_main(self, arguments=None, cwd=None): arguments.insert(0, self.__script_path) cwd = cwd if cwd else determine_repository_production_directory() - with subprocess.Popen(arguments, cwd=cwd,stdout=subprocess.PIPE, - stderr=subprocess.PIPE) as new_process: + with subprocess.Popen( + arguments, cwd=cwd, stdout=subprocess.PIPE, stderr=subprocess.PIPE + ) as new_process: process_return_code = new_process.wait() - process_stdout_output = "".join(io.TextIOWrapper(new_process.stdout, encoding="utf-8")) - process_stderr_output = "".join(io.TextIOWrapper(new_process.stderr, encoding="utf-8")) + process_stdout_output = "".join( + io.TextIOWrapper(new_process.stdout, encoding="utf-8") + ) + process_stderr_output = "".join( + io.TextIOWrapper(new_process.stderr, encoding="utf-8") + ) + + return InProcessResult( + process_return_code, + io.StringIO(process_stdout_output), + io.StringIO(process_stderr_output), + ) + - return InProcessResult(process_return_code, io.StringIO(process_stdout_output), \ - io.StringIO(process_stderr_output)) # pylint: enable=too-few-public-methods + class MainlineExecutor(InProcessExecution): """ Class to provide a local instance of a InProcessExecution class. @@ -78,6 +99,7 @@ def get_main_name(self): """ return self.__entry_point + def determine_repository_base_directory(): """ Determine the location of the repository's base directory. @@ -86,31 +108,44 @@ def determine_repository_base_directory(): base_directory = os.path.realpath(os.path.join(script_directory, "..", "..", "..")) return base_directory + def determine_repository_production_directory(): """ Determine the location of the repository's production directory. """ production_directory = os.path.realpath( - os.path.join(determine_repository_base_directory(), "production")) + os.path.join(determine_repository_base_directory(), "production") + ) return production_directory + def determine_old_script_behavior(gdev_arguments): """ Determine the behavior of the old gdev script. 
""" original_gdev_script_path = os.path.realpath( - os.path.join(determine_repository_base_directory(), "dev_tools", "gdev", "gdev.sh")) - #executor = SubprocessExecutor(original_gdev_script_path) + os.path.join( + determine_repository_base_directory(), "dev_tools", "gdev", "gdev.sh" + ) + ) + # executor = SubprocessExecutor(original_gdev_script_path) arguments_to_use = [original_gdev_script_path] arguments_to_use.extend(gdev_arguments) - with subprocess.Popen(arguments_to_use, \ - cwd=determine_repository_production_directory(),stdout=subprocess.PIPE, - stderr=subprocess.PIPE) as new_process: + with subprocess.Popen( + arguments_to_use, + cwd=determine_repository_production_directory(), + stdout=subprocess.PIPE, + stderr=subprocess.PIPE, + ) as new_process: process_return_code = new_process.wait() - process_stdout_output = "".join(io.TextIOWrapper(new_process.stdout, encoding="utf-8")) - process_stderr_output = "".join(io.TextIOWrapper(new_process.stderr, encoding="utf-8")) + process_stdout_output = "".join( + io.TextIOWrapper(new_process.stdout, encoding="utf-8") + ) + process_stderr_output = "".join( + io.TextIOWrapper(new_process.stderr, encoding="utf-8") + ) return process_return_code, process_stdout_output, process_stderr_output diff --git a/dev_tools/docker_dev/test/pytest_execute.py b/dev_tools/docker_dev/test/pytest_execute.py index f4939aabd4df..d3c91fa6f050 100644 --- a/dev_tools/docker_dev/test/pytest_execute.py +++ b/dev_tools/docker_dev/test/pytest_execute.py @@ -11,6 +11,7 @@ This code copied from: https://github.com/jackdewinter/pyscan. Any changes made to this code above the base code are copyright by Gaia Platform LLC. """ + import difflib import io import logging @@ -62,10 +63,10 @@ def compare_versus_expected( diff_values = "\n".join(list(diff)) print(diff_values, file=sys.stderr) if not was_found: - assert ( - False - ), f"Block\n---\n{next_text_block}\n---\nwas not found in" + \ - f"\n---\n{actual_stream.getvalue()}" + assert False, ( + f"Block\n---\n{next_text_block}\n---\nwas not found in" + + f"\n---\n{actual_stream.getvalue()}" + ) elif actual_stream.getvalue().strip() != expected_text.strip(): diff = difflib.ndiff( expected_text.splitlines(), actual_stream.getvalue().splitlines() @@ -161,10 +162,10 @@ def assert_results( not self.__std_err.getvalue() ), f"Expected stderr to be empty, not: {self.__std_err.getvalue()}" - assert ( - self.__return_code == error_code - ), f"Actual error code ({self.__return_code}) and " + \ - f"expected error code ({error_code}) differ." + assert self.__return_code == error_code, ( + f"Actual error code ({self.__return_code}) and " + + f"expected error code ({error_code}) differ." 
+ ) finally: self.__std_out.close() @@ -226,10 +227,10 @@ def assert_resultant_file(cls, file_path, expected_contents): if are_different: diff = difflib.ndiff(split_actual_contents, split_expected_contents) diff_values = "\n".join(list(diff)) - assert ( - False - ), f"Actual and expected contents of '{file_path}' are not equal:" + \ - f"\n---\n{diff_values}\n---\n" + assert False, ( + f"Actual and expected contents of '{file_path}' are not equal:" + + f"\n---\n{diff_values}\n---\n" + ) # pylint: disable=too-few-public-methods diff --git a/dev_tools/docker_dev/test/test_scenario_help.py b/dev_tools/docker_dev/test/test_scenario_help.py new file mode 100644 index 000000000000..3d394a1c2d02 --- /dev/null +++ b/dev_tools/docker_dev/test/test_scenario_help.py @@ -0,0 +1,227 @@ +#!/usr/bin/env python3 + +############################################# +# Copyright (c) Gaia Platform LLC +# All rights reserved. +############################################# + +""" +Module to provide high level scenario tests for the Docker_Dev project. +""" + +from test.gdev_execute import ( + determine_old_script_behavior, + determine_repository_production_directory, + SubprocessExecutor, +) + + +def get_executor(): + """ + Get the executor to use for invoking the Gdev application for testing. + """ + return SubprocessExecutor() + + +def test_show_help_x(): + """ + Make sure that we can show help about the various things to do. + """ + + # Arrange + executor = get_executor() + suppplied_arguments = ["--help"] + ( + expected_return_code, + expected_output, + expected_error, + ) = determine_old_script_behavior(suppplied_arguments) + expected_output = expected_output.replace( + "\nGaiaPlatform build and development environment tool.\n", "" + ) + + # Act + execute_results = executor.invoke_main( + arguments=suppplied_arguments, cwd=determine_repository_production_directory() + ) + + # Assert + execute_results.assert_results( + expected_output, expected_error, expected_return_code + ) + + +def test_show_help_build(): + """ + Make sure that we can show help about the build task. + """ + + # Arrange + executor = get_executor() + suppplied_arguments = ["build", "--help"] + ( + expected_return_code, + expected_output, + expected_error, + ) = determine_old_script_behavior(suppplied_arguments) + + # Act + execute_results = executor.invoke_main( + arguments=suppplied_arguments, cwd=determine_repository_production_directory() + ) + + # Assert + expected_output = expected_output.replace( + "Dependency(options: 'Options')", + "Class to provide for the `build` subcommand entry point.1", + ) + execute_results.assert_results( + expected_output, expected_error, expected_return_code + ) + + +def test_show_help_cfg(): + """ + Make sure that we can show help about the cfg task. + """ + + # Arrange + executor = get_executor() + suppplied_arguments = ["cfg", "--help"] + ( + expected_return_code, + expected_output, + expected_error, + ) = determine_old_script_behavior(suppplied_arguments) + + # Act + execute_results = executor.invoke_main( + arguments=suppplied_arguments, cwd=determine_repository_production_directory() + ) + + # Assert + expected_output = expected_output.replace( + "Parse gdev.cfg for build rules.", + "Class to provide for the `cfg` subcommand entry point.1", + ) + execute_results.assert_results( + expected_output, expected_error, expected_return_code + ) + + +def test_show_help_dockerfile(): + """ + Make sure that we can show help about the dockerfile task. 
+ """ + + # Arrange + executor = get_executor() + suppplied_arguments = ["dockerfile", "--help"] + ( + expected_return_code, + expected_output, + expected_error, + ) = determine_old_script_behavior(suppplied_arguments) + + # Act + execute_results = executor.invoke_main( + arguments=suppplied_arguments, cwd=determine_repository_production_directory() + ) + + # Assert + expected_output = expected_output.replace( + "Dependency(options: 'Options')", + "Class to provide for the `dockerfile` subcommand entry point.1", + ) + execute_results.assert_results( + expected_output, expected_error, expected_return_code + ) + + +def test_show_help_gen(): + """ + Make sure that we can show help about the gen task. + """ + + # Arrange + executor = get_executor() + suppplied_arguments = ["gen", "--help"] + ( + expected_return_code, + expected_output, + expected_error, + ) = determine_old_script_behavior(suppplied_arguments) + expected_output = expected_output.replace( + "\nInternal component commands for top-level " + + "gdev commands. These should rarely\n" + + "be needed.\n", + "", + ) + + # Act + execute_results = executor.invoke_main( + arguments=suppplied_arguments, cwd=determine_repository_production_directory() + ) + + # Assert + execute_results.assert_results( + expected_output, expected_error, expected_return_code + ) + + +def test_show_help_push(): + """ + Make sure that we can show help about the push task. + """ + + # Arrange + executor = get_executor() + suppplied_arguments = ["push", "--help"] + ( + expected_return_code, + expected_output, + expected_error, + ) = determine_old_script_behavior(suppplied_arguments) + + # Act + execute_results = executor.invoke_main( + arguments=suppplied_arguments, cwd=determine_repository_production_directory() + ) + + # Assert + expected_output = expected_output.replace( + "Dependency(options: 'Options')", + "Class to provide for the `push` subcommand entry point.1", + ) + execute_results.assert_results( + expected_output, expected_error, expected_return_code + ) + + +def test_show_help_run(): + """ + Make sure that we can show help about the run task. + """ + + # Arrange + executor = get_executor() + suppplied_arguments = ["run", "--help"] + ( + expected_return_code, + expected_output, + expected_error, + ) = determine_old_script_behavior(suppplied_arguments) + + # Act + execute_results = executor.invoke_main( + arguments=suppplied_arguments, cwd=determine_repository_production_directory() + ) + + # Assert + expected_output = expected_output.replace( + "Dependency(options: 'Options')", + "Class to provide for the `run` subcommand entry point.1", + ) + execute_results.assert_results( + expected_output, expected_error, expected_return_code + ) diff --git a/dev_tools/docker_dev/test/test_scenarios.py b/dev_tools/docker_dev/test/test_scenarios.py index 44972cb01abc..f1d6f36f6f82 100644 --- a/dev_tools/docker_dev/test/test_scenarios.py +++ b/dev_tools/docker_dev/test/test_scenarios.py @@ -9,224 +9,92 @@ Module to provide high level scenario tests for the Docker_Dev project. """ -from test.gdev_execute import determine_old_script_behavior, \ - determine_repository_production_directory, SubprocessExecutor, \ - determine_repository_base_directory +from test.gdev_execute import ( + determine_old_script_behavior, + determine_repository_production_directory, + SubprocessExecutor, + determine_repository_base_directory, +) from gdev.host import Host + def get_executor(): """ Get the executor to use for invoking the Gdev application for testing. 
""" return SubprocessExecutor() -def test_show_help_x(): - """ - Make sure that we can show help about the various things to do. - """ - - # Arrange - executor = get_executor() - suppplied_arguments = ["--help"] - expected_return_code, expected_output, expected_error = \ - determine_old_script_behavior(suppplied_arguments) - expected_output = expected_output.replace(\ - "\nGaiaPlatform build and development environment tool.\n", "") - - # Act - execute_results = executor.invoke_main(arguments=suppplied_arguments, \ - cwd=determine_repository_production_directory()) - - # Assert - execute_results.assert_results( - expected_output, expected_error, expected_return_code - ) - -def test_show_help_build(): - """ - Make sure that we can show help about the build task. - """ - - # Arrange - executor = get_executor() - suppplied_arguments = ["build", "--help"] - expected_return_code, expected_output, expected_error = \ - determine_old_script_behavior(suppplied_arguments) - - # Act - execute_results = executor.invoke_main(arguments=suppplied_arguments, \ - cwd=determine_repository_production_directory()) - - # Assert - expected_output = expected_output.replace("Dependency(options: 'Options')", \ - "Class to provide for the `build` subcommand entry point.") - execute_results.assert_results( - expected_output, expected_error, expected_return_code - ) - -def test_show_help_cfg(): - """ - Make sure that we can show help about the cfg task. - """ - - # Arrange - executor = get_executor() - suppplied_arguments = ["cfg", "--help"] - expected_return_code, expected_output, expected_error = \ - determine_old_script_behavior(suppplied_arguments) - - # Act - execute_results = executor.invoke_main(arguments=suppplied_arguments, \ - cwd=determine_repository_production_directory()) - - # Assert - expected_output = expected_output.replace("Parse gdev.cfg for build rules.", \ - "Class to provide for the `cfg` subcommand entry point.") - execute_results.assert_results( - expected_output, expected_error, expected_return_code - ) -def test_show_help_dockerfile(): - """ - Make sure that we can show help about the dockerfile task. - """ - - # Arrange - executor = get_executor() - suppplied_arguments = ["dockerfile", "--help"] - expected_return_code, expected_output, expected_error = \ - determine_old_script_behavior(suppplied_arguments) - - # Act - execute_results = executor.invoke_main(arguments=suppplied_arguments, \ - cwd=determine_repository_production_directory()) - - # Assert - expected_output = expected_output.replace("Dependency(options: 'Options')", \ - "Class to provide for the `dockerfile` subcommand entry point.") - execute_results.assert_results( - expected_output, expected_error, expected_return_code - ) -def test_show_help_gen(): - """ - Make sure that we can show help about the gen task. - """ - - # Arrange - executor = get_executor() - suppplied_arguments = ["gen", "--help"] - expected_return_code, expected_output, expected_error = \ - determine_old_script_behavior(suppplied_arguments) - expected_output = expected_output.replace("\nInternal component commands for top-level " + \ - "gdev commands. These should rarely\n" + \ - "be needed.\n", "") - - # Act - execute_results = executor.invoke_main(arguments=suppplied_arguments, \ - cwd=determine_repository_production_directory()) - - # Assert - execute_results.assert_results( - expected_output, expected_error, expected_return_code - ) - -def test_show_help_push(): - """ - Make sure that we can show help about the push task. 
- """ - - # Arrange - executor = get_executor() - suppplied_arguments = ["push", "--help"] - expected_return_code, expected_output, expected_error = \ - determine_old_script_behavior(suppplied_arguments) - - # Act - execute_results = executor.invoke_main(arguments=suppplied_arguments, \ - cwd=determine_repository_production_directory()) - - # Assert - expected_output = expected_output.replace("Dependency(options: 'Options')", \ - "Class to provide for the `push` subcommand entry point.") - execute_results.assert_results( - expected_output, expected_error, expected_return_code - ) - -def test_show_help_run(): - """ - Make sure that we can show help about the run task. - """ - - # Arrange - executor = get_executor() - suppplied_arguments = ["run", "--help"] - expected_return_code, expected_output, expected_error = \ - determine_old_script_behavior(suppplied_arguments) - - # Act - execute_results = executor.invoke_main(arguments=suppplied_arguments, \ - cwd=determine_repository_production_directory()) - - # Assert - expected_output = expected_output.replace("Dependency(options: 'Options')", \ - "Class to provide for the `run` subcommand entry point.") - execute_results.assert_results( - expected_output, expected_error, expected_return_code - ) def __find_drydock_line(expected_output, drydock_prefix): assert drydock_prefix in expected_output start_index = expected_output.index(drydock_prefix) end_index = expected_output.index("]\n", start_index) - last_line = expected_output[start_index+len(drydock_prefix):end_index] + last_line = expected_output[start_index + len(drydock_prefix) : end_index] return last_line + def __find_docker_run_line(expected_output): run_line = __find_drydock_line(expected_output, "[execvpe:docker run ") print(f"run_line:{run_line}") return run_line + def __find_docker_build_line(expected_output): - build_line= __find_drydock_line(expected_output, "[execute:docker buildx build ") + build_line = __find_drydock_line(expected_output, "[execute:docker buildx build ") print(f"build_line:{build_line}") return build_line -def __find_and_remove(line_to_look_in, part_to_look_for, look_at_end=False, \ - search_for_end_whitespace=False, replace_with=None): + +def __find_and_remove( + line_to_look_in, + part_to_look_for, + look_at_end=False, + search_for_end_whitespace=False, + replace_with=None, +): if look_at_end: assert line_to_look_in.endswith(part_to_look_for) - line_to_look_in = line_to_look_in[0:-(len(part_to_look_for))] + line_to_look_in = line_to_look_in[0 : -(len(part_to_look_for))] else: part_index = line_to_look_in.index(part_to_look_for) if search_for_end_whitespace: end_index = line_to_look_in.index(" ", part_index + len(part_to_look_for)) else: end_index = part_index + len(part_to_look_for) - line_to_look_in = line_to_look_in[0:part_index] + \ - (replace_with if replace_with else "") + \ - line_to_look_in[end_index:] + line_to_look_in = ( + line_to_look_in[0:part_index] + + (replace_with if replace_with else "") + + line_to_look_in[end_index:] + ) return line_to_look_in + def __construct_base_build_line(is_using_mixins=False): - current_hash = Host.execute_and_get_line_sync('git rev-parse HEAD') + current_hash = Host.execute_and_get_line_sync("git rev-parse HEAD") + + run_type = "custom" if is_using_mixins else "run" + return ( + f"-f {determine_repository_base_directory()}/.gdev/production" + + f"/{run_type}.dockerfile.gdev " + + f'-t production__{run_type}:latest --label GitHash="{current_hash}" ' + + "--build-arg BUILDKIT_INLINE_CACHE=1 --platform linux/amd64 
--shm-size 1gb " + + f"--ssh default {determine_repository_base_directory()}" + ) - run_type = 'custom' if is_using_mixins else 'run' - return f"-f {determine_repository_base_directory()}/.gdev/production" + \ - f"/{run_type}.dockerfile.gdev " + \ - f"-t production__{run_type}:latest --label GitHash=\"{current_hash}\" " + \ - "--build-arg BUILDKIT_INLINE_CACHE=1 --platform linux/amd64 --shm-size 1gb " + \ - f"--ssh default {determine_repository_base_directory()}" def __construct_base_run_command_line(is_using_mixins=False): - run_type = 'custom' if is_using_mixins else 'run' - return f"--rm --init --entrypoint /bin/bash --hostname production__{run_type} " + \ - "--platform linux/amd64 " + \ - f"--privileged --volume {determine_repository_base_directory()}:" + \ - f"/source production__{run_type}:latest" + run_type = "custom" if is_using_mixins else "run" + return ( + f"--rm --init --entrypoint /bin/bash --hostname production__{run_type} " + + "--platform linux/amd64 " + + f"--privileged --volume {determine_repository_base_directory()}:" + + f"/source production__{run_type}:latest" + ) def test_show_cfg(): @@ -237,21 +105,28 @@ def test_show_cfg(): # Arrange executor = get_executor() suppplied_arguments = ["cfg"] - expected_return_code, expected_output, expected_error = \ - determine_old_script_behavior(suppplied_arguments) + ( + expected_return_code, + expected_output, + expected_error, + ) = determine_old_script_behavior(suppplied_arguments) # Act - execute_results = executor.invoke_main(arguments=suppplied_arguments, \ - cwd=determine_repository_production_directory()) + execute_results = executor.invoke_main( + arguments=suppplied_arguments, cwd=determine_repository_production_directory() + ) # Assert execute_results.assert_results( expected_output, expected_error, expected_return_code ) - assert '# enable by setting "Debug"' in expected_output, \ - "Original output missing config hint." - assert '# enable by setting "GaiaLLVMTests"' in expected_output, \ - "Original output missing config hint." + assert ( + '# enable by setting "Debug"' in expected_output + ), "Original output missing config hint." + assert ( + '# enable by setting "GaiaLLVMTests"' in expected_output + ), "Original output missing config hint." + def test_show_cfg_with_cfg_enable_debug(): """ @@ -262,21 +137,28 @@ def test_show_cfg_with_cfg_enable_debug(): # Arrange executor = get_executor() suppplied_arguments = ["cfg", "--cfg-enable", "Debug"] - expected_return_code, expected_output, expected_error = \ - determine_old_script_behavior(suppplied_arguments) + ( + expected_return_code, + expected_output, + expected_error, + ) = determine_old_script_behavior(suppplied_arguments) # Act - execute_results = executor.invoke_main(arguments=suppplied_arguments, \ - cwd=determine_repository_production_directory()) + execute_results = executor.invoke_main( + arguments=suppplied_arguments, cwd=determine_repository_production_directory() + ) # Assert execute_results.assert_results( expected_output, expected_error, expected_return_code ) - assert '# enable by setting "Debug"' not in expected_output, \ - "Original output contains config hint." - assert '# enable by setting "GaiaLLVMTests"' in expected_output, \ - "Original output missing config hint." + assert ( + '# enable by setting "Debug"' not in expected_output + ), "Original output contains config hint." + assert ( + '# enable by setting "GaiaLLVMTests"' in expected_output + ), "Original output missing config hint." 
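
The __find_drydock_line helper defined earlier slices the command text out of a bracketed dry-dock log entry. A sketch against a made-up log line:

    expected_output = "noise [execvpe:docker run --rm my_image:latest]\nmore"
    prefix = "[execvpe:docker run "
    start_index = expected_output.index(prefix)
    end_index = expected_output.index("]\n", start_index)
    print(expected_output[start_index + len(prefix) : end_index])
    # --rm my_image:latest
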
+ def test_show_cfg_with_cfg_enable_debug_and_llvm(): """ @@ -287,21 +169,28 @@ def test_show_cfg_with_cfg_enable_debug_and_llvm(): # Arrange executor = get_executor() suppplied_arguments = ["cfg", "--cfg-enable", "Debug", "GaiaLLVMTests"] - expected_return_code, expected_output, expected_error = \ - determine_old_script_behavior(suppplied_arguments) + ( + expected_return_code, + expected_output, + expected_error, + ) = determine_old_script_behavior(suppplied_arguments) # Act - execute_results = executor.invoke_main(arguments=suppplied_arguments, \ - cwd=determine_repository_production_directory()) + execute_results = executor.invoke_main( + arguments=suppplied_arguments, cwd=determine_repository_production_directory() + ) # Assert execute_results.assert_results( expected_output, expected_error, expected_return_code ) - assert '# enable by setting "Debug"' not in expected_output, \ - "Original output contains config hint." - assert '# enable by setting "GaiaLLVMTests"' not in expected_output, \ - "Original output missing config hint." + assert ( + '# enable by setting "Debug"' not in expected_output + ), "Original output contains config hint." + assert ( + '# enable by setting "GaiaLLVMTests"' not in expected_output + ), "Original output missing config hint." + def test_generate_dockerfile(): """ @@ -311,21 +200,28 @@ def test_generate_dockerfile(): # Arrange executor = get_executor() suppplied_arguments = ["dockerfile"] - expected_return_code, expected_output, expected_error = \ - determine_old_script_behavior(suppplied_arguments) + ( + expected_return_code, + expected_output, + expected_error, + ) = determine_old_script_behavior(suppplied_arguments) # Act - execute_results = executor.invoke_main(arguments=suppplied_arguments, \ - cwd=determine_repository_production_directory()) + execute_results = executor.invoke_main( + arguments=suppplied_arguments, cwd=determine_repository_production_directory() + ) # Assert execute_results.assert_results( expected_output, expected_error, expected_return_code ) - assert "-DCMAKE_BUILD_TYPE=Debug" not in expected_output, \ - "Original output contains untriggered line." - assert "-DBUILD_GAIA_LLVM_TESTS=ON" not in expected_output, \ - "Original output contains untriggered line." + assert ( + "-DCMAKE_BUILD_TYPE=Debug" not in expected_output + ), "Original output contains untriggered line." + assert ( + "-DBUILD_GAIA_LLVM_TESTS=ON" not in expected_output + ), "Original output contains untriggered line." + def test_generate_dockerfile_debug(): """ @@ -339,21 +235,28 @@ def test_generate_dockerfile_debug(): # Arrange executor = get_executor() suppplied_arguments = ["dockerfile", "--cfg-enable", "Debug"] - expected_return_code, expected_output, expected_error = \ - determine_old_script_behavior(suppplied_arguments) + ( + expected_return_code, + expected_output, + expected_error, + ) = determine_old_script_behavior(suppplied_arguments) # Act - execute_results = executor.invoke_main(arguments=suppplied_arguments, \ - cwd=determine_repository_production_directory()) + execute_results = executor.invoke_main( + arguments=suppplied_arguments, cwd=determine_repository_production_directory() + ) # Assert execute_results.assert_results( expected_output, expected_error, expected_return_code ) - assert "-DCMAKE_BUILD_TYPE=Debug" in expected_output, \ - "Original output does not contain triggered line." - assert "-DBUILD_GAIA_LLVM_TESTS=ON" not in expected_output, \ - "Original output contains untriggered line." 
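
The assert rewrites in these test hunks all follow one Black rule: backslash continuations become a parenthesized condition with the message after the closing parenthesis. The resulting shape, on a trivial assertion:

    value = 10
    assert (
        value > 5
    ), "value should exceed 5"
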
+ assert ( + "-DCMAKE_BUILD_TYPE=Debug" in expected_output + ), "Original output does not contain triggered line." + assert ( + "-DBUILD_GAIA_LLVM_TESTS=ON" not in expected_output + ), "Original output contains untriggered line." + def test_generate_dockerfile_debug_and_llvm(): """ @@ -367,21 +270,28 @@ def test_generate_dockerfile_debug_and_llvm(): # Arrange executor = get_executor() suppplied_arguments = ["dockerfile", "--cfg-enable", "Debug", "GaiaLLVMTests"] - expected_return_code, expected_output, expected_error = \ - determine_old_script_behavior(suppplied_arguments) + ( + expected_return_code, + expected_output, + expected_error, + ) = determine_old_script_behavior(suppplied_arguments) # Act - execute_results = executor.invoke_main(arguments=suppplied_arguments, \ - cwd=determine_repository_production_directory()) + execute_results = executor.invoke_main( + arguments=suppplied_arguments, cwd=determine_repository_production_directory() + ) # Assert execute_results.assert_results( expected_output, expected_error, expected_return_code ) - assert "-DCMAKE_BUILD_TYPE=Debug" in expected_output, \ - "Original output does not contain triggered line." - assert "-DBUILD_GAIA_LLVM_TESTS=ON" in expected_output, \ - "Original output does not contain triggered line." + assert ( + "-DCMAKE_BUILD_TYPE=Debug" in expected_output + ), "Original output does not contain triggered line." + assert ( + "-DBUILD_GAIA_LLVM_TESTS=ON" in expected_output + ), "Original output does not contain triggered line." + def test_generate_dockerfile_debug_and_new_base_image(): """ @@ -394,22 +304,37 @@ def test_generate_dockerfile_debug_and_new_base_image(): # Arrange executor = get_executor() - suppplied_arguments = ["dockerfile", "--cfg-enable", "Debug", "--base-image", "frogger"] - expected_return_code, expected_output, expected_error = \ - determine_old_script_behavior(suppplied_arguments) + suppplied_arguments = [ + "dockerfile", + "--cfg-enable", + "Debug", + "--base-image", + "frogger", + ] + ( + expected_return_code, + expected_output, + expected_error, + ) = determine_old_script_behavior(suppplied_arguments) # Act - execute_results = executor.invoke_main(arguments=suppplied_arguments, \ - cwd=determine_repository_production_directory()) + execute_results = executor.invoke_main( + arguments=suppplied_arguments, cwd=determine_repository_production_directory() + ) # Assert execute_results.assert_results( expected_output, expected_error, expected_return_code ) - assert "frogger" in expected_output, "Original output does not contain specified base image." - assert "ubuntu:20.04" not in expected_output, "Original output contains default base image." + assert ( + "frogger" in expected_output + ), "Original output does not contain specified base image." + assert ( + "ubuntu:20.04" not in expected_output + ), "Original output contains default base image." 
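# The build/run tests that follow normalize docker command lines with a
# __find_and_remove helper defined outside this hunk. Inferred from its call
# sites, the assumed behavior is roughly the sketch below (an illustration,
# not the patched code): find a fragment, optionally widen the match to the
# next whitespace, and splice in a replacement.
def _sketch_find_and_remove(
    line, search_for, search_for_end_whitespace=False, replace_with=""
):
    """Remove search_for from line, optionally consuming to the next space."""
    start = line.index(search_for)  # raises if the expected fragment is absent
    end = start + len(search_for)
    if search_for_end_whitespace:
        # Consume the rest of the token, e.g. a registry-qualified image name.
        end = line.index(" ", end)
    return line[:start] + replace_with + line[end:]


# e.g. _sketch_find_and_remove(line, " --platform linux/arm64",
#                              replace_with=" --platform linux/amd64")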
assert "RUN groupadd -r -o -g 1000" not in expected_output + def test_generate_dockerfile_mixins_sshd(): """ Make sure that we can generate a dockerfile from the current directory, @@ -419,12 +344,16 @@ def test_generate_dockerfile_mixins_sshd(): # Arrange executor = get_executor() suppplied_arguments = ["dockerfile", "--mixins", "sshd"] - expected_return_code, expected_output, expected_error = \ - determine_old_script_behavior(suppplied_arguments) + ( + expected_return_code, + expected_output, + expected_error, + ) = determine_old_script_behavior(suppplied_arguments) # Act - execute_results = executor.invoke_main(arguments=suppplied_arguments, \ - cwd=determine_repository_production_directory()) + execute_results = executor.invoke_main( + arguments=suppplied_arguments, cwd=determine_repository_production_directory() + ) # Assert execute_results.assert_results( @@ -433,6 +362,7 @@ def test_generate_dockerfile_mixins_sshd(): assert "RUN groupadd -r -o -g 1000" not in expected_output assert "openssh-server" in expected_output + def test_generate_dockerfile_mixins_sshd_and_nano(): """ Make sure that we can generate a dockerfile from the current directory, @@ -442,12 +372,16 @@ def test_generate_dockerfile_mixins_sshd_and_nano(): # Arrange executor = get_executor() suppplied_arguments = ["dockerfile", "--mixins", "sshd", "nano"] - expected_return_code, expected_output, expected_error = \ - determine_old_script_behavior(suppplied_arguments) + ( + expected_return_code, + expected_output, + expected_error, + ) = determine_old_script_behavior(suppplied_arguments) # Act - execute_results = executor.invoke_main(arguments=suppplied_arguments, \ - cwd=determine_repository_production_directory()) + execute_results = executor.invoke_main( + arguments=suppplied_arguments, cwd=determine_repository_production_directory() + ) # Assert execute_results.assert_results( @@ -457,6 +391,7 @@ def test_generate_dockerfile_mixins_sshd_and_nano(): assert "openssh-server" in expected_output assert "nano" in expected_output + def test_generate_docker_build(): """ Make sure that we can generate a request to docker to build the image created @@ -467,12 +402,16 @@ def test_generate_docker_build(): # Arrange executor = get_executor() suppplied_arguments = ["build", "--dry-dock"] - expected_return_code, expected_output, expected_error = \ - determine_old_script_behavior(suppplied_arguments) + ( + expected_return_code, + expected_output, + expected_error, + ) = determine_old_script_behavior(suppplied_arguments) # Act - execute_results = executor.invoke_main(arguments=suppplied_arguments, \ - cwd=determine_repository_production_directory()) + execute_results = executor.invoke_main( + arguments=suppplied_arguments, cwd=determine_repository_production_directory() + ) # Assert execute_results.assert_results( @@ -482,6 +421,7 @@ def test_generate_docker_build(): build_line = __find_docker_build_line(expected_output) assert build_line == __construct_base_build_line() + def test_generate_docker_build_with_platform(): """ Make sure that we can generate a request to docker to build the image created @@ -491,12 +431,16 @@ def test_generate_docker_build_with_platform(): # Arrange executor = get_executor() suppplied_arguments = ["build", "--dry-dock", "--platform", "arm64"] - expected_return_code, expected_output, expected_error = \ - determine_old_script_behavior(suppplied_arguments) + ( + expected_return_code, + expected_output, + expected_error, + ) = determine_old_script_behavior(suppplied_arguments) # Act - execute_results = 
executor.invoke_main(arguments=suppplied_arguments, \ - cwd=determine_repository_production_directory()) + execute_results = executor.invoke_main( + arguments=suppplied_arguments, cwd=determine_repository_production_directory() + ) # Assert execute_results.assert_results( @@ -504,10 +448,12 @@ def test_generate_docker_build_with_platform(): ) build_line = __find_docker_build_line(expected_output) - build_line = __find_and_remove(build_line, " --platform linux/arm64",\ - replace_with=" --platform linux/amd64") + build_line = __find_and_remove( + build_line, " --platform linux/arm64", replace_with=" --platform linux/amd64" + ) assert build_line == __construct_base_build_line() + def test_generate_docker_build_with_registry(): """ Make sure that we can generate a request to docker to build the image created @@ -517,12 +463,16 @@ def test_generate_docker_build_with_registry(): # Arrange executor = get_executor() suppplied_arguments = ["build", "--dry-dock", "--registry", "localhost"] - expected_return_code, expected_output, expected_error = \ - determine_old_script_behavior(suppplied_arguments) + ( + expected_return_code, + expected_output, + expected_error, + ) = determine_old_script_behavior(suppplied_arguments) # Act - execute_results = executor.invoke_main(arguments=suppplied_arguments, \ - cwd=determine_repository_production_directory()) + execute_results = executor.invoke_main( + arguments=suppplied_arguments, cwd=determine_repository_production_directory() + ) # Assert execute_results.assert_results( @@ -530,10 +480,15 @@ def test_generate_docker_build_with_registry(): ) build_line = __find_docker_build_line(expected_output) - build_line = __find_and_remove(build_line, " --cache-from localhost/production__",\ - search_for_end_whitespace=True,replace_with=" ") + build_line = __find_and_remove( + build_line, + " --cache-from localhost/production__", + search_for_end_whitespace=True, + replace_with=" ", + ) assert build_line == __construct_base_build_line() + def test_generate_docker_build_with_mixins_sudo(): """ Make sure that we can generate a request to docker to build the image created @@ -543,12 +498,16 @@ def test_generate_docker_build_with_mixins_sudo(): # Arrange executor = get_executor() suppplied_arguments = ["build", "--dry-dock", "--mixins", "sudo"] - expected_return_code, expected_output, expected_error = \ - determine_old_script_behavior(suppplied_arguments) + ( + expected_return_code, + expected_output, + expected_error, + ) = determine_old_script_behavior(suppplied_arguments) # Act - execute_results = executor.invoke_main(arguments=suppplied_arguments, \ - cwd=determine_repository_production_directory()) + execute_results = executor.invoke_main( + arguments=suppplied_arguments, cwd=determine_repository_production_directory() + ) # Assert execute_results.assert_results( @@ -556,9 +515,10 @@ def test_generate_docker_build_with_mixins_sudo(): ) build_line = __find_docker_build_line(expected_output) - build_line = __find_and_remove(build_line, ' --label Mixins="[\'sudo\']"') + build_line = __find_and_remove(build_line, " --label Mixins=\"['sudo']\"") assert build_line == __construct_base_build_line(is_using_mixins=True) + def test_generate_docker_build_with_mixins_sudo_and_nano(): """ Make sure that we can generate a request to docker to build the image created @@ -568,12 +528,16 @@ def test_generate_docker_build_with_mixins_sudo_and_nano(): # Arrange executor = get_executor() suppplied_arguments = ["build", "--dry-dock", "--mixins", "sudo", "nano"] - expected_return_code, 
expected_output, expected_error = \ - determine_old_script_behavior(suppplied_arguments) + ( + expected_return_code, + expected_output, + expected_error, + ) = determine_old_script_behavior(suppplied_arguments) # Act - execute_results = executor.invoke_main(arguments=suppplied_arguments, \ - cwd=determine_repository_production_directory()) + execute_results = executor.invoke_main( + arguments=suppplied_arguments, cwd=determine_repository_production_directory() + ) # Assert execute_results.assert_results( @@ -581,9 +545,10 @@ def test_generate_docker_build_with_mixins_sudo_and_nano(): ) build_line = __find_docker_build_line(expected_output) - build_line = __find_and_remove(build_line, ' --label Mixins="[\'nano\',\'sudo\']"') + build_line = __find_and_remove(build_line, " --label Mixins=\"['nano','sudo']\"") assert build_line == __construct_base_build_line(is_using_mixins=True) + def test_generate_docker_run(): """ Make sure that we can generate a request to docker to run the image built @@ -594,12 +559,16 @@ def test_generate_docker_run(): # Arrange executor = get_executor() suppplied_arguments = ["run", "--dry-dock"] - expected_return_code, expected_output, expected_error = \ - determine_old_script_behavior(suppplied_arguments) + ( + expected_return_code, + expected_output, + expected_error, + ) = determine_old_script_behavior(suppplied_arguments) # Act - execute_results = executor.invoke_main(arguments=suppplied_arguments, \ - cwd=determine_repository_production_directory()) + execute_results = executor.invoke_main( + arguments=suppplied_arguments, cwd=determine_repository_production_directory() + ) # Assert execute_results.assert_results( @@ -622,12 +591,16 @@ def test_generate_docker_run_mixin_clion(): # Arrange executor = get_executor() suppplied_arguments = ["run", "--dry-dock", "--mixins", "clion"] - expected_return_code, expected_output, expected_error = \ - determine_old_script_behavior(suppplied_arguments) + ( + expected_return_code, + expected_output, + expected_error, + ) = determine_old_script_behavior(suppplied_arguments) # Act - execute_results = executor.invoke_main(arguments=suppplied_arguments, \ - cwd=determine_repository_production_directory()) + execute_results = executor.invoke_main( + arguments=suppplied_arguments, cwd=determine_repository_production_directory() + ) # Assert execute_results.assert_results( @@ -635,14 +608,17 @@ def test_generate_docker_run_mixin_clion(): ) build_line = __find_docker_build_line(expected_output) - build_line = __find_and_remove(build_line, ' --label Mixins="[\'clion\']"') + build_line = __find_and_remove(build_line, " --label Mixins=\"['clion']\"") assert build_line == __construct_base_build_line(is_using_mixins=True) run_line = __find_docker_run_line(expected_output) - run_line = __find_and_remove(run_line, \ - " -p 22:22 --cap-add=SYS_PTRACE --security-opt seccomp=unconfined --user 1000:1000") + run_line = __find_and_remove( + run_line, + " -p 22:22 --cap-add=SYS_PTRACE --security-opt seccomp=unconfined --user 1000:1000", + ) assert run_line == __construct_base_run_command_line(is_using_mixins=True) + def test_generate_docker_run_mixin_nano(): """ Make sure that we can generate a request to docker to run the image built @@ -652,12 +628,16 @@ def test_generate_docker_run_mixin_nano(): # Arrange executor = get_executor() suppplied_arguments = ["run", "--dry-dock", "--mixins", "nano"] - expected_return_code, expected_output, expected_error = \ - determine_old_script_behavior(suppplied_arguments) + ( + expected_return_code, + 
expected_output, + expected_error, + ) = determine_old_script_behavior(suppplied_arguments) # Act - execute_results = executor.invoke_main(arguments=suppplied_arguments, \ - cwd=determine_repository_production_directory()) + execute_results = executor.invoke_main( + arguments=suppplied_arguments, cwd=determine_repository_production_directory() + ) # Assert execute_results.assert_results( @@ -665,12 +645,13 @@ def test_generate_docker_run_mixin_nano(): ) build_line = __find_docker_build_line(expected_output) - build_line = __find_and_remove(build_line, ' --label Mixins="[\'nano\']"') + build_line = __find_and_remove(build_line, " --label Mixins=\"['nano']\"") assert build_line == __construct_base_build_line(is_using_mixins=True) run_line = __find_docker_run_line(expected_output) assert run_line == __construct_base_run_command_line(is_using_mixins=True) + def test_generate_docker_run_mounts(): """ Make sure that we can generate a request to docker to run the image built @@ -681,12 +662,16 @@ def test_generate_docker_run_mounts(): # Arrange executor = get_executor() suppplied_arguments = ["run", "--dry-dock", "--mount", "/root:/host"] - expected_return_code, expected_output, expected_error = \ - determine_old_script_behavior(suppplied_arguments) + ( + expected_return_code, + expected_output, + expected_error, + ) = determine_old_script_behavior(suppplied_arguments) # Act - execute_results = executor.invoke_main(arguments=suppplied_arguments, \ - cwd=determine_repository_production_directory()) + execute_results = executor.invoke_main( + arguments=suppplied_arguments, cwd=determine_repository_production_directory() + ) # Assert execute_results.assert_results( @@ -697,9 +682,11 @@ def test_generate_docker_run_mounts(): assert build_line == __construct_base_build_line() run_line = __find_docker_run_line(expected_output) - run_line = __find_and_remove(run_line, \ - '--mount type=volume,dst=/host,volume-driver=local,volume-opt=type=none,' + \ - 'volume-opt=o=bind,volume-opt=device=/root ') + run_line = __find_and_remove( + run_line, + "--mount type=volume,dst=/host,volume-driver=local,volume-opt=type=none," + + "volume-opt=o=bind,volume-opt=device=/root ", + ) assert run_line == __construct_base_run_command_line() @@ -713,12 +700,16 @@ def test_generate_docker_run_platform(): # Arrange executor = get_executor() suppplied_arguments = ["run", "--dry-dock", "--platform", "arm64"] - expected_return_code, expected_output, expected_error = \ - determine_old_script_behavior(suppplied_arguments) + ( + expected_return_code, + expected_output, + expected_error, + ) = determine_old_script_behavior(suppplied_arguments) # Act - execute_results = executor.invoke_main(arguments=suppplied_arguments, \ - cwd=determine_repository_production_directory()) + execute_results = executor.invoke_main( + arguments=suppplied_arguments, cwd=determine_repository_production_directory() + ) # Assert execute_results.assert_results( @@ -726,13 +717,15 @@ def test_generate_docker_run_platform(): ) build_line = __find_docker_build_line(expected_output) - build_line = __find_and_remove(build_line, " --platform linux/arm64", \ - replace_with=" --platform linux/amd64") + build_line = __find_and_remove( + build_line, " --platform linux/arm64", replace_with=" --platform linux/amd64" + ) assert build_line == __construct_base_build_line() run_line = __find_docker_run_line(expected_output) - run_line = __find_and_remove(run_line, " --platform linux/arm64", \ - replace_with=" --platform linux/amd64") + run_line = __find_and_remove( + 
run_line, " --platform linux/arm64", replace_with=" --platform linux/amd64" + ) assert run_line == __construct_base_run_command_line() @@ -745,12 +738,16 @@ def test_generate_docker_run_ports(): # Arrange executor = get_executor() suppplied_arguments = ["run", "--dry-dock", "--ports", "1234"] - expected_return_code, expected_output, expected_error = \ - determine_old_script_behavior(suppplied_arguments) + ( + expected_return_code, + expected_output, + expected_error, + ) = determine_old_script_behavior(suppplied_arguments) # Act - execute_results = executor.invoke_main(arguments=suppplied_arguments, \ - cwd=determine_repository_production_directory()) + execute_results = executor.invoke_main( + arguments=suppplied_arguments, cwd=determine_repository_production_directory() + ) # Assert execute_results.assert_results( @@ -775,12 +772,16 @@ def test_generate_docker_run_registry(): # Arrange executor = get_executor() suppplied_arguments = ["run", "--dry-dock", "--registry", "localhost"] - expected_return_code, expected_output, expected_error = \ - determine_old_script_behavior(suppplied_arguments) + ( + expected_return_code, + expected_output, + expected_error, + ) = determine_old_script_behavior(suppplied_arguments) # Act - execute_results = executor.invoke_main(arguments=suppplied_arguments, \ - cwd=determine_repository_production_directory()) + execute_results = executor.invoke_main( + arguments=suppplied_arguments, cwd=determine_repository_production_directory() + ) # Assert execute_results.assert_results( @@ -788,13 +789,17 @@ def test_generate_docker_run_registry(): ) build_line = __find_docker_build_line(expected_output) - build_line = __find_and_remove(build_line, "--cache-from localhost/production__", \ - search_for_end_whitespace=True) + build_line = __find_and_remove( + build_line, + "--cache-from localhost/production__", + search_for_end_whitespace=True, + ) assert build_line == __construct_base_build_line() run_line = __find_docker_run_line(expected_output) assert run_line == __construct_base_run_command_line() + def test_generate_docker_run_force(): """ Make sure that we can generate a request to docker to run the image built @@ -804,12 +809,16 @@ def test_generate_docker_run_force(): # Arrange executor = get_executor() suppplied_arguments = ["run", "--dry-dock", "--force"] - expected_return_code, expected_output, expected_error = \ - determine_old_script_behavior(suppplied_arguments) + ( + expected_return_code, + expected_output, + expected_error, + ) = determine_old_script_behavior(suppplied_arguments) # Act - execute_results = executor.invoke_main(arguments=suppplied_arguments, \ - cwd=determine_repository_production_directory()) + execute_results = executor.invoke_main( + arguments=suppplied_arguments, cwd=determine_repository_production_directory() + ) # Assert execute_results.assert_results( @@ -822,6 +831,7 @@ def test_generate_docker_run_force(): run_line = __find_docker_run_line(expected_output) assert run_line == __construct_base_run_command_line() + def test_generate_docker_run_args(): """ Make sure that we can generate a request to docker to run the image built @@ -831,12 +841,16 @@ def test_generate_docker_run_args(): # Arrange executor = get_executor() suppplied_arguments = ["run", "--dry-dock", "not-an-argument"] - expected_return_code, expected_output, expected_error = \ - determine_old_script_behavior(suppplied_arguments) + ( + expected_return_code, + expected_output, + expected_error, + ) = determine_old_script_behavior(suppplied_arguments) # Act - 
execute_results = executor.invoke_main(arguments=suppplied_arguments, \ - cwd=determine_repository_production_directory()) + execute_results = executor.invoke_main( + arguments=suppplied_arguments, cwd=determine_repository_production_directory() + ) # Assert execute_results.assert_results(