diff --git a/.github/.release-please-manifest.json b/.github/.release-please-manifest.json index 698886db5..4d204362b 100644 --- a/.github/.release-please-manifest.json +++ b/.github/.release-please-manifest.json @@ -1,3 +1,3 @@ { - ".": "3.7.1" + ".": "4.0.0" } diff --git a/CHANGELOG.md b/CHANGELOG.md new file mode 100644 index 000000000..033a9c686 --- /dev/null +++ b/CHANGELOG.md @@ -0,0 +1,35 @@ +# Changelog + +## [4.0.0](https://github.com/testcontainers/testcontainers-python/compare/testcontainers-v3.7.1...testcontainers-v4.0.0) (2024-03-06) + + +### ⚠ BREAKING CHANGES + +* **compose:** implement compose v2 with improved typing ([#426](https://github.com/testcontainers/testcontainers-python/issues/426)) +* **core:** add support for `tc.host` and de-prioritise `docker:dind` ([#388](https://github.com/testcontainers/testcontainers-python/issues/388)) + +### Features + +* **build:** use poetry and organise modules ([#408](https://github.com/testcontainers/testcontainers-python/issues/408)) ([6c69583](https://github.com/testcontainers/testcontainers-python/commit/6c695835520bdcbf9824e8cefa00f7613d2a7cb9)) +* **compose:** allow running specific services in compose ([f61dcda](https://github.com/testcontainers/testcontainers-python/commit/f61dcda8bd7ea329cd3c836b6d6e2f0bd990335d)) +* **compose:** implement compose v2 with improved typing ([#426](https://github.com/testcontainers/testcontainers-python/issues/426)) ([5356caf](https://github.com/testcontainers/testcontainers-python/commit/5356caf2de056313a5b3f2805ed80e6a23b027a8)) +* **core:** add support for `tc.host` and de-prioritise `docker:dind` ([#388](https://github.com/testcontainers/testcontainers-python/issues/388)) ([2db8e6d](https://github.com/testcontainers/testcontainers-python/commit/2db8e6d123d42b57309408dd98ba9a06acc05c4b)) +* **redis:** support AsyncRedisContainer ([#442](https://github.com/testcontainers/testcontainers-python/issues/442)) 
([cc4cb37](https://github.com/testcontainers/testcontainers-python/commit/cc4cb3762802dc75b0801727d8b1f1a1c56b7f50)) +* **release:** automate release via release-please ([#429](https://github.com/testcontainers/testcontainers-python/issues/429)) ([30f859e](https://github.com/testcontainers/testcontainers-python/commit/30f859eb1535acd6e93c331213426e1319ee9a47)) + + +### Bug Fixes + +* Added URLError to exceptions to wait for in elasticsearch ([0f9ad24](https://github.com/testcontainers/testcontainers-python/commit/0f9ad24f2c0df362ee15b81ce8d7d36b9f98e6e1)) +* **build:** add `pre-commit` as a dev dependency to simplify local dev and CI ([#438](https://github.com/testcontainers/testcontainers-python/issues/438)) ([1223583](https://github.com/testcontainers/testcontainers-python/commit/1223583d8fc3a1ab95441d82c7e1ece57f026fbf)) +* **build:** early exit strategy for modules ([#437](https://github.com/testcontainers/testcontainers-python/issues/437)) ([7358b49](https://github.com/testcontainers/testcontainers-python/commit/7358b4919c1010315a384a8f0fe2860e5a0ca6b4)) +* changed files breaks on main ([#422](https://github.com/testcontainers/testcontainers-python/issues/422)) ([3271357](https://github.com/testcontainers/testcontainers-python/commit/32713578dcf07f672a87818e00562b58874b4a52)) +* flaky garbage collection resulting in testing errors ([#423](https://github.com/testcontainers/testcontainers-python/issues/423)) ([b535ea2](https://github.com/testcontainers/testcontainers-python/commit/b535ea255bcaaa546f8cda7b2b17718c1cc7f3ca)) +* rabbitmq readiness probe ([#375](https://github.com/testcontainers/testcontainers-python/issues/375)) ([71cb75b](https://github.com/testcontainers/testcontainers-python/commit/71cb75b281df55ece4d5caf5d487059a7f38c34f)) +* **release:** prove that the release process updates the version ([#444](https://github.com/testcontainers/testcontainers-python/issues/444)) 
([87b5873](https://github.com/testcontainers/testcontainers-python/commit/87b5873c1ec3a3e4e74742417d6068fa86cf1762)) +* test linting issue ([427c9b8](https://github.com/testcontainers/testcontainers-python/commit/427c9b841c2f6f516ec6cb74d5bd2839cb1939f4)) + + +### Documentation + +* Sphinx - Add title to each doc page ([#443](https://github.com/testcontainers/testcontainers-python/issues/443)) ([750e12a](https://github.com/testcontainers/testcontainers-python/commit/750e12a41172ce4aaf045c61dec33d318dc3c2f6)) diff --git a/core/testcontainers/compose/__init__.py b/core/testcontainers/compose/__init__.py new file mode 100644 index 000000000..9af994f30 --- /dev/null +++ b/core/testcontainers/compose/__init__.py @@ -0,0 +1,8 @@ +# flake8: noqa +from testcontainers.compose.compose import ( + ContainerIsNotRunning, + NoSuchPortExposed, + PublishedPort, + ComposeContainer, + DockerCompose, +) diff --git a/core/testcontainers/compose/compose.py b/core/testcontainers/compose/compose.py new file mode 100644 index 000000000..e72824bd1 --- /dev/null +++ b/core/testcontainers/compose/compose.py @@ -0,0 +1,406 @@ +import subprocess +from dataclasses import dataclass, field, fields +from functools import cached_property +from json import loads +from os import PathLike +from re import split +from typing import Callable, Literal, Optional, TypeVar, Union +from urllib.error import HTTPError, URLError +from urllib.request import urlopen + +from testcontainers.core.exceptions import ContainerIsNotRunning, NoSuchPortExposed +from testcontainers.core.waiting_utils import wait_container_is_ready + +_IPT = TypeVar("_IPT") + + +def _ignore_properties(cls: type[_IPT], dict_: any) -> _IPT: + """omits extra fields like @JsonIgnoreProperties(ignoreUnknown = true) + + https://gist.github.com/alexanderankin/2a4549ac03554a31bef6eaaf2eaf7fd5""" + if isinstance(dict_, cls): + return dict_ + class_fields = {f.name for f in fields(cls)} + filtered = {k: v for k, v in dict_.items() if k in 
class_fields} + return cls(**filtered) + + +@dataclass +class PublishedPort: + """ + Class that represents the response we get from compose when inquiring status + via `DockerCompose.get_running_containers()`. + """ + + URL: Optional[str] = None + TargetPort: Optional[str] = None + PublishedPort: Optional[str] = None + Protocol: Optional[str] = None + + +OT = TypeVar("OT") + + +def get_only_element_or_raise(array: list[OT], exception: Callable[[], Exception]) -> OT: + if len(array) != 1: + e = exception() + raise e + return array[0] + + +@dataclass +class ComposeContainer: + """ + A container class that represents a container managed by compose. + It is not a true testcontainers.core.container.DockerContainer, + but you can use the id with DockerClient to get that one too. + """ + + ID: Optional[str] = None + Name: Optional[str] = None + Command: Optional[str] = None + Project: Optional[str] = None + Service: Optional[str] = None + State: Optional[str] = None + Health: Optional[str] = None + ExitCode: Optional[str] = None + Publishers: list[PublishedPort] = field(default_factory=list) + + def __post_init__(self): + if self.Publishers: + self.Publishers = [_ignore_properties(PublishedPort, p) for p in self.Publishers] + + def get_publisher( + self, + by_port: Optional[int] = None, + by_host: Optional[str] = None, + prefer_ip_version: Literal["IPV4", "IPv6"] = "IPv4", + ) -> PublishedPort: + remaining_publishers = self.Publishers + + remaining_publishers = [r for r in remaining_publishers if self._matches_protocol(prefer_ip_version, r)] + + if by_port: + remaining_publishers = [item for item in remaining_publishers if by_port == item.TargetPort] + if by_host: + remaining_publishers = [item for item in remaining_publishers if by_host == item.URL] + if len(remaining_publishers) == 0: + raise NoSuchPortExposed(f"Could not find publisher for for service {self.Service}") + return get_only_element_or_raise( + remaining_publishers, + lambda: NoSuchPortExposed( + 
"get_publisher failed because there is " + f"not exactly 1 publisher for service {self.Service}" + f" when filtering by_port={by_port}, by_host={by_host}" + f" (but {len(remaining_publishers)})" + ), + ) + + @staticmethod + def _matches_protocol(prefer_ip_version, r): + return (":" in r.URL) is (prefer_ip_version == "IPv6") + + +@dataclass +class DockerCompose: + """ + Manage docker compose environments. + + Args: + context: + The docker context. It corresponds to the directory containing + the docker compose configuration file. + compose_file_name: + Optional. File name of the docker compose configuration file. + If specified, you need to also specify the overrides if any. + pull: + Pull images before launching environment. + build: + Run `docker compose build` before running the environment. + wait: + Wait for the services to be healthy + (as per healthcheck definitions in the docker compose configuration) + env_file: + Path to an '.env' file containing environment variables + to pass to docker compose. + services: + The list of services to use from this DockerCompose. + client_args: + arguments to pass to docker.from_env() + + Example: + + This example spins up chrome and firefox containers using docker compose. + + .. doctest:: + + >>> from testcontainers.compose import DockerCompose + + >>> compose = DockerCompose("compose/tests", compose_file_name="docker-compose-4.yml", + ... pull=True) + >>> with compose: + ... stdout, stderr = compose.get_logs() + >>> b"Hello from Docker!" in stdout + True + + .. 
code-block:: yaml + + services: + hello-world: + image: "hello-world" + """ + + context: Union[str, PathLike] + compose_file_name: Optional[Union[str, list[str]]] = None + pull: bool = False + build: bool = False + wait: bool = True + env_file: Optional[str] = None + services: Optional[list[str]] = None + + def __post_init__(self): + if isinstance(self.compose_file_name, str): + self.compose_file_name = [self.compose_file_name] + + def __enter__(self) -> "DockerCompose": + self.start() + return self + + def __exit__(self, exc_type, exc_val, exc_tb) -> None: + self.stop() + + def docker_compose_command(self) -> list[str]: + """ + Returns command parts used for the docker compose commands + + Returns: + cmd: Docker compose command parts. + """ + return self.compose_command_property + + @cached_property + def compose_command_property(self) -> list[str]: + docker_compose_cmd = ["docker", "compose"] + if self.compose_file_name: + for file in self.compose_file_name: + docker_compose_cmd += ["-f", file] + if self.env_file: + docker_compose_cmd += ["--env-file", self.env_file] + return docker_compose_cmd + + def start(self) -> None: + """ + Starts the docker compose environment. + """ + base_cmd = self.compose_command_property or [] + + # pull means running a separate command before starting + if self.pull: + pull_cmd = [*base_cmd, "pull"] + self._call_command(cmd=pull_cmd) + + up_cmd = [*base_cmd, "up"] + + # build means modifying the up command + if self.build: + up_cmd.append("--build") + + if self.wait: + up_cmd.append("--wait") + else: + # we run in detached mode instead of blocking + up_cmd.append("--detach") + + if self.services: + up_cmd.extend(self.services) + + self._call_command(cmd=up_cmd) + + def stop(self, down=True) -> None: + """ + Stops the docker compose environment. 
+ """ + down_cmd = self.compose_command_property[:] + if down: + down_cmd += ["down", "--volumes"] + else: + down_cmd += ["stop"] + self._call_command(cmd=down_cmd) + + def get_logs(self, *services: str) -> tuple[str, str]: + """ + Returns all log output from stdout and stderr of a specific container. + + :param services: which services to get the logs for (or omit, for all) + + Returns: + stdout: Standard output stream. + stderr: Standard error stream. + """ + logs_cmd = [*self.compose_command_property, "logs", *services] + + result = subprocess.run( + logs_cmd, + cwd=self.context, + capture_output=True, + ) + return result.stdout.decode("utf-8"), result.stderr.decode("utf-8") + + def get_containers(self, include_all=False) -> list[ComposeContainer]: + """ + Fetch information about running containers via `docker compose ps --format json`. + Available only in V2 of compose. + + Returns: + The list of running containers. + + """ + + cmd = [*self.compose_command_property, "ps", "--format", "json"] + if include_all: + cmd = [*cmd, "-a"] + result = subprocess.run(cmd, cwd=self.context, check=True, stdout=subprocess.PIPE) + stdout = split(r"\r?\n", result.stdout.decode("utf-8")) + + containers = [] + # one line per service in docker 25, single array for docker 24.0.2 + for line in stdout: + if not line: + continue + data = loads(line) + if isinstance(data, list): + containers += [_ignore_properties(ComposeContainer, d) for d in data] + else: + containers.append(_ignore_properties(ComposeContainer, data)) + + return containers + + def get_container( + self, + service_name: Optional[str] = None, + include_all: bool = False, + ) -> ComposeContainer: + if not service_name: + containers = self.get_containers(include_all=include_all) + return get_only_element_or_raise( + containers, + lambda: ContainerIsNotRunning( + "get_container failed because no service_name given " + f"and there is not exactly 1 container (but {len(containers)})" + ), + ) + + matching_containers = [ + 
item for item in self.get_containers(include_all=include_all) if item.Service == service_name + ] + + if not matching_containers: + raise ContainerIsNotRunning(f"{service_name} is not running in the compose context") + + return matching_containers[0] + + def exec_in_container( + self, + command: list[str], + service_name: Optional[str] = None, + ) -> tuple[str, str, int]: + """ + Executes a command in the container of one of the services. + + Args: + service_name: Name of the docker compose service to run the command in. + command: Command to execute. + + :param service_name: specify the service name + :param command: the command to run in the container + + Returns: + stdout: Standard output stream. + stderr: Standard error stream. + exit_code: The command's exit code. + """ + if not service_name: + service_name = self.get_container().Service + exec_cmd = [*self.compose_command_property, "exec", "-T", service_name, *command] + result = subprocess.run( + exec_cmd, + cwd=self.context, + capture_output=True, + check=True, + ) + + return (result.stdout.decode("utf-8"), result.stderr.decode("utf-8"), result.returncode) + + def _call_command( + self, + cmd: Union[str, list[str]], + context: Optional[str] = None, + ) -> None: + context = context or self.context + subprocess.call(cmd, cwd=context) + + def get_service_port( + self, + service_name: Optional[str] = None, + port: Optional[int] = None, + ): + """ + Returns the mapped port for one of the services. + + Parameters + ---------- + service_name: str + Name of the docker compose service + port: int + The internal port to get the mapping for + + Returns + ------- + str: + The mapped port on the host + """ + return self.get_container(service_name).get_publisher(by_port=port).PublishedPort + + def get_service_host( + self, + service_name: Optional[str] = None, + port: Optional[int] = None, + ): + """ + Returns the host for one of the services. 
+ + Parameters + ---------- + service_name: str + Name of the docker compose service + port: int + The internal port to get the host for + + Returns + ------- + str: + The hostname for the service + """ + return self.get_container(service_name).get_publisher(by_port=port).URL + + def get_service_host_and_port( + self, + service_name: Optional[str] = None, + port: Optional[int] = None, + ): + publisher = self.get_container(service_name).get_publisher(by_port=port) + return publisher.URL, publisher.PublishedPort + + @wait_container_is_ready(HTTPError, URLError) + def wait_for(self, url: str) -> "DockerCompose": + """ + Waits for a response from a given URL. This is typically used to block until a service in + the environment has started and is responding. Note that it does not assert any sort of + return code, only check that the connection was successful. + + Args: + url: URL from one of the services in the environment to use to wait on. + """ + with urlopen(url) as response: + response.read() + return self diff --git a/core/testcontainers/core/exceptions.py b/core/testcontainers/core/exceptions.py index 8bf027630..6694e598b 100644 --- a/core/testcontainers/core/exceptions.py +++ b/core/testcontainers/core/exceptions.py @@ -16,5 +16,9 @@ class ContainerStartException(RuntimeError): pass +class ContainerIsNotRunning(RuntimeError): + pass + + class NoSuchPortExposed(RuntimeError): pass diff --git a/core/tests/compose_fixtures/basic/docker-compose.yaml b/core/tests/compose_fixtures/basic/docker-compose.yaml new file mode 100644 index 000000000..ff3f74220 --- /dev/null +++ b/core/tests/compose_fixtures/basic/docker-compose.yaml @@ -0,0 +1,10 @@ +version: '3.0' + +services: + alpine: + image: alpine:latest + init: true + command: + - sh + - -c + - 'while true; do sleep 0.1 ; date -Ins; done' diff --git a/core/tests/compose_fixtures/port_multiple/compose.yaml b/core/tests/compose_fixtures/port_multiple/compose.yaml new file mode 100644 index 000000000..65717fc4a --- 
/dev/null +++ b/core/tests/compose_fixtures/port_multiple/compose.yaml @@ -0,0 +1,28 @@ +version: '3.0' + +services: + alpine: + image: nginx:alpine-slim + init: true + ports: + - '81' + - '82' + - target: 80 + host_ip: 127.0.0.1 + protocol: tcp + command: + - sh + - -c + - 'd=/etc/nginx/conf.d; echo "server { listen 81; location / { return 202; } }" > $$d/81.conf && echo "server { listen 82; location / { return 204; } }" > $$d/82.conf && nginx -g "daemon off;"' + + alpine2: + image: nginx:alpine-slim + init: true + ports: + - target: 80 + host_ip: 127.0.0.1 + protocol: tcp + command: + - sh + - -c + - 'd=/etc/nginx/conf.d; echo "server { listen 81; location / { return 202; } }" > $$d/81.conf && echo "server { listen 82; location / { return 204; } }" > $$d/82.conf && nginx -g "daemon off;"' diff --git a/core/tests/compose_fixtures/port_single/compose.yaml b/core/tests/compose_fixtures/port_single/compose.yaml new file mode 100644 index 000000000..d1bf9eb45 --- /dev/null +++ b/core/tests/compose_fixtures/port_single/compose.yaml @@ -0,0 +1,14 @@ +version: '3.0' + +services: + alpine: + image: nginx:alpine-slim + init: true + ports: + - target: 80 + host_ip: 127.0.0.1 + protocol: tcp + command: + - sh + - -c + - 'nginx -g "daemon off;"' diff --git a/core/tests/test_compose.py b/core/tests/test_compose.py new file mode 100644 index 000000000..0a244220b --- /dev/null +++ b/core/tests/test_compose.py @@ -0,0 +1,243 @@ +from pathlib import Path +from re import split +from time import sleep +from typing import Union +from urllib.request import urlopen, Request + +import pytest + +from testcontainers.compose import DockerCompose, ContainerIsNotRunning, NoSuchPortExposed + +FIXTURES = Path(__file__).parent.joinpath("compose_fixtures") + + +def test_compose_no_file_name(): + basic = DockerCompose(context=FIXTURES / "basic") + assert basic.compose_file_name is None + + +def test_compose_str_file_name(): + basic = DockerCompose(context=FIXTURES / "basic", 
compose_file_name="docker-compose.yaml") + assert basic.compose_file_name == ["docker-compose.yaml"] + + +def test_compose_list_file_name(): + basic = DockerCompose(context=FIXTURES / "basic", compose_file_name=["docker-compose.yaml"]) + assert basic.compose_file_name == ["docker-compose.yaml"] + + +def test_compose_stop(): + basic = DockerCompose(context=FIXTURES / "basic") + basic.stop() + + +def test_compose_start_stop(): + basic = DockerCompose(context=FIXTURES / "basic") + basic.start() + basic.stop() + + +def test_compose(): + """stream-of-consciousness e2e test""" + basic = DockerCompose(context=FIXTURES / "basic") + try: + # first it does not exist + containers = basic.get_containers(include_all=True) + assert len(containers) == 0 + + # then we create it and it exists + basic.start() + containers = basic.get_containers(include_all=True) + assert len(containers) == 1 + containers = basic.get_containers() + assert len(containers) == 1 + + # test that get_container returns the same object, value assertions, etc + from_all = containers[0] + assert from_all.State == "running" + assert from_all.Service == "alpine" + + by_name = basic.get_container("alpine") + + assert by_name.Name == from_all.Name + assert by_name.Service == from_all.Service + assert by_name.State == from_all.State + assert by_name.ID == from_all.ID + + assert by_name.ExitCode == 0 + + # what if you want to get logs after it crashes: + basic.stop(down=False) + + with pytest.raises(ContainerIsNotRunning): + assert basic.get_container("alpine") is None + + # what it looks like after it exits + stopped = basic.get_container("alpine", include_all=True) + assert stopped.State == "exited" + finally: + basic.stop() + + +def test_compose_logs(): + basic = DockerCompose(context=FIXTURES / "basic") + with basic: + sleep(1) # generate some logs every 200ms + stdout, stderr = basic.get_logs() + container = basic.get_container() + + assert not stderr + assert stdout + lines = split(r"\r?\n", stdout) + + 
assert len(lines) > 5 # actually 10 + for line in lines[1:]: + # either the line is blank or the first column (|-separated) contains the service name + # this is a safe way to split the string + # docker changes the prefix between versions 24 and 25 + assert not line or container.Service in next(iter(line.split("|")), None) + + +# noinspection HttpUrlsUsage +def test_compose_ports(): + # fairly straight forward - can we get the right port to request it + single = DockerCompose(context=FIXTURES / "port_single") + with single: + host, port = single.get_service_host_and_port() + endpoint = f"http://{host}:{port}" + single.wait_for(endpoint) + code, response = fetch(Request(method="GET", url=endpoint)) + assert code == 200 + assert "
<html
" in response + + +# noinspection HttpUrlsUsage +def test_compose_multiple_containers_and_ports(): + """test for the logic encapsulated in 'one' function + + assert correctness of multiple logic + """ + multiple = DockerCompose(context=FIXTURES / "port_multiple") + with multiple: + with pytest.raises(ContainerIsNotRunning) as e: + multiple.get_container() + e.match("get_container failed") + e.match("not exactly 1 container") + + assert multiple.get_container("alpine") + assert multiple.get_container("alpine2") + + a2p = multiple.get_service_port("alpine2") + assert a2p > 0 # > 1024 + + with pytest.raises(NoSuchPortExposed) as e: + multiple.get_service_port("alpine") + e.match("not exactly 1") + with pytest.raises(NoSuchPortExposed) as e: + multiple.get_container("alpine").get_publisher(by_host="example.com") + e.match("not exactly 1") + with pytest.raises(NoSuchPortExposed) as e: + multiple.get_container("alpine").get_publisher(by_host="localhost") + e.match("not exactly 1") + + try: + # this fails when ipv6 is enabled and docker is forwarding for both 4 + 6 + multiple.get_container(service_name="alpine").get_publisher(by_port=81, prefer_ip_version="IPv6") + except: # noqa + pass + + ports = [ + ( + 80, + multiple.get_service_host(service_name="alpine", port=80), + multiple.get_service_port(service_name="alpine", port=80), + ), + ( + 81, + multiple.get_service_host(service_name="alpine", port=81), + multiple.get_service_port(service_name="alpine", port=81), + ), + ( + 82, + multiple.get_service_host(service_name="alpine", port=82), + multiple.get_service_port(service_name="alpine", port=82), + ), + ] + + # test correctness of port lookup + for target, host, mapped in ports: + assert mapped, f"we have a mapped port for target port {target}" + url = f"http://{host}:{mapped}" + code, body = fetch(Request(method="GET", url=url)) + + expected_code = { + 80: 200, + 81: 202, + 82: 204, + }.get(code, None) + + if not expected_code: + continue + + message = f"response 
'{body}' ({code}) from url {url} should have code {expected_code}" + assert code == expected_code, message + + +# noinspection HttpUrlsUsage +def test_exec_in_container(): + """we test that we can manipulate a container via exec""" + single = DockerCompose(context=FIXTURES / "port_single") + with single: + url = f"http://{single.get_service_host()}:{single.get_service_port()}" + single.wait_for(url) + + # unchanged + code, body = fetch(url) + assert code == 200 + assert "test_exec_in_container" not in body + + # change it + single.exec_in_container( + command=["sh", "-c", 'echo "test_exec_in_container" > /usr/share/nginx/html/index.html'] + ) + + # and it is changed + code, body = fetch(url) + assert code == 200 + assert "test_exec_in_container" in body + + +# noinspection HttpUrlsUsage +def test_exec_in_container_multiple(): + """same as above, except we exec into a particular service""" + multiple = DockerCompose(context=FIXTURES / "port_multiple") + with multiple: + sn = "alpine2" # service name + host, port = multiple.get_service_host_and_port(service_name=sn) + url = f"http://{host}:{port}" + multiple.wait_for(url) + + # unchanged + code, body = fetch(url) + assert code == 200 + assert "test_exec_in_container" not in body + + # change it + multiple.exec_in_container( + command=["sh", "-c", 'echo "test_exec_in_container" > /usr/share/nginx/html/index.html'], service_name=sn + ) + + # and it is changed + code, body = fetch(url) + assert code == 200 + assert "test_exec_in_container" in body + + +def fetch(req: Union[Request, str]): + if isinstance(req, str): + req = Request(method="GET", url=req) + with urlopen(req) as res: + body = res.read().decode("utf-8") + if 200 < res.getcode() >= 400: + raise Exception(f"HTTP Error: {res.getcode()} - {res.reason}: {body}") + return res.getcode(), body diff --git a/modules/redis/testcontainers/redis/__init__.py b/modules/redis/testcontainers/redis/__init__.py index fba24be15..7a4d46613 100644 --- 
a/modules/redis/testcontainers/redis/__init__.py +++ b/modules/redis/testcontainers/redis/__init__.py @@ -14,6 +14,7 @@ from typing import Optional import redis +from redis.asyncio import Redis as asyncRedis from testcontainers.core.container import DockerContainer from testcontainers.core.utils import raise_for_deprecated_parameter from testcontainers.core.waiting_utils import wait_container_is_ready @@ -69,3 +70,29 @@ def start(self) -> "RedisContainer": super().start() self._connect() return self + + +class AsyncRedisContainer(RedisContainer): + """ + Redis container. + + Example + ------- + .. doctest:: + + >>> from testcontainers.redis import AsyncRedisContainer + + >>> with AsyncRedisContainer() as redis_container: + ... redis_client =await redis_container.get_async_client() + """ + + def __init__(self, image="redis:latest", port_to_expose=6379, password=None, **kwargs): + super().__init__(image, port_to_expose, password, **kwargs) + + async def get_async_client(self, **kwargs): + return await asyncRedis( + host=self.get_container_host_ip(), + port=self.get_exposed_port(self.port), + password=self.password, + **kwargs, + ) diff --git a/modules/redis/tests/test_redis.py b/modules/redis/tests/test_redis.py index 7dc56aa46..bd8e244c5 100644 --- a/modules/redis/tests/test_redis.py +++ b/modules/redis/tests/test_redis.py @@ -1,6 +1,7 @@ import time -from testcontainers.redis import RedisContainer +from testcontainers.redis import RedisContainer, AsyncRedisContainer +import pytest def test_docker_run_redis(): @@ -23,6 +24,49 @@ def test_docker_run_redis_with_password(): assert client.get("hello") == "world" +pytest.mark.usefixtures("anyio_backend") + + +@pytest.mark.parametrize("anyio_backend", ["asyncio"]) +async def test_key_set_in_async_redis(anyio_backend): + with AsyncRedisContainer() as container: + async_redis_client: redis.Redis = await container.get_async_client(decode_responses=True) + key = "key" + expected_value = 1 + await async_redis_client.set(key, 
expected_value) + actual_value = await async_redis_client.get(key) + assert int(actual_value) == expected_value + + +pytest.mark.usefixtures("anyio_backend") + + +@pytest.mark.parametrize("anyio_backend", ["asyncio"]) +@pytest.mark.skip(reason="Need to sort out async pub/sub") +async def test_docker_run_async_redis(anyio_backend): + config = AsyncRedisContainer() + with config as container: + client: redis.Redis = await container.get_async_client(decode_responses=True) + p = await client.pubsub() + await p.subscribe("test") + await client.publish("test", "new_msg") + msg = wait_for_message(p) + assert "data" in msg + assert b"new_msg", msg["data"] + + +pytest.mark.usefixtures("anyio_backend") + + +@pytest.mark.parametrize("anyio_backend", ["asyncio"]) +async def test_docker_run_async_redis_with_password(anyio_backend): + config = AsyncRedisContainer(password="mypass") + with config as container: + client: redis.Redis = await container.get_async_client(decode_responses=True) + await client.set("hello", "world") + assert await client.get("hello") == "world" + + def wait_for_message(pubsub, timeout=1, ignore_subscribe_messages=True): now = time.time() timeout = now + timeout diff --git a/poetry.lock b/poetry.lock index 0d457dfe0..f97690266 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1,4 +1,4 @@ -# This file is automatically @generated by Poetry 1.7.0 and should not be changed by hand. +# This file is automatically @generated by Poetry 1.8.2 and should not be changed by hand. 
[[package]] name = "alabaster" @@ -11,6 +11,28 @@ files = [ {file = "alabaster-0.7.16.tar.gz", hash = "sha256:75a8b99c28a5dad50dd7f8ccdd447a121ddb3892da9e53d1ca5cca3106d58d65"}, ] +[[package]] +name = "anyio" +version = "4.3.0" +description = "High level compatibility layer for multiple asynchronous event loop implementations" +optional = false +python-versions = ">=3.8" +files = [ + {file = "anyio-4.3.0-py3-none-any.whl", hash = "sha256:048e05d0f6caeed70d731f3db756d35dcc1f35747c8c403364a8332c630441b8"}, + {file = "anyio-4.3.0.tar.gz", hash = "sha256:f75253795a87df48568485fd18cdd2a3fa5c4f7c5be8e5e36637733fce06fed6"}, +] + +[package.dependencies] +exceptiongroup = {version = ">=1.0.2", markers = "python_version < \"3.11\""} +idna = ">=2.8" +sniffio = ">=1.1" +typing-extensions = {version = ">=4.1", markers = "python_version < \"3.11\""} + +[package.extras] +doc = ["Sphinx (>=7)", "packaging", "sphinx-autodoc-typehints (>=1.2.0)", "sphinx-rtd-theme"] +test = ["anyio[trio]", "coverage[toml] (>=7)", "exceptiongroup (>=1.2.0)", "hypothesis (>=4.0)", "psutil (>=5.9)", "pytest (>=7.0)", "pytest-mock (>=3.6.1)", "trustme", "uvloop (>=0.17)"] +trio = ["trio (>=0.23)"] + [[package]] name = "argon2-cffi" version = "23.1.0" @@ -1754,6 +1776,7 @@ files = [ {file = "psycopg2_binary-2.9.9-cp311-cp311-win32.whl", hash = "sha256:dc4926288b2a3e9fd7b50dc6a1909a13bbdadfc67d93f3374d984e56f885579d"}, {file = "psycopg2_binary-2.9.9-cp311-cp311-win_amd64.whl", hash = "sha256:b76bedd166805480ab069612119ea636f5ab8f8771e640ae103e05a4aae3e417"}, {file = "psycopg2_binary-2.9.9-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:8532fd6e6e2dc57bcb3bc90b079c60de896d2128c5d9d6f24a63875a95a088cf"}, + {file = "psycopg2_binary-2.9.9-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:b0605eaed3eb239e87df0d5e3c6489daae3f7388d455d0c0b4df899519c6a38d"}, {file = "psycopg2_binary-2.9.9-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:8f8544b092a29a6ddd72f3556a9fcf249ec412e10ad28be6a0c0d948924f2212"}, {file = "psycopg2_binary-2.9.9-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2d423c8d8a3c82d08fe8af900ad5b613ce3632a1249fd6a223941d0735fce493"}, {file = "psycopg2_binary-2.9.9-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2e5afae772c00980525f6d6ecf7cbca55676296b580c0e6abb407f15f3706996"}, @@ -1762,6 +1785,8 @@ files = [ {file = "psycopg2_binary-2.9.9-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:cb16c65dcb648d0a43a2521f2f0a2300f40639f6f8c1ecbc662141e4e3e1ee07"}, {file = "psycopg2_binary-2.9.9-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:911dda9c487075abd54e644ccdf5e5c16773470a6a5d3826fda76699410066fb"}, {file = "psycopg2_binary-2.9.9-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:57fede879f08d23c85140a360c6a77709113efd1c993923c59fde17aa27599fe"}, + {file = "psycopg2_binary-2.9.9-cp312-cp312-win32.whl", hash = "sha256:64cf30263844fa208851ebb13b0732ce674d8ec6a0c86a4e160495d299ba3c93"}, + {file = "psycopg2_binary-2.9.9-cp312-cp312-win_amd64.whl", hash = "sha256:81ff62668af011f9a48787564ab7eded4e9fb17a4a6a74af5ffa6a457400d2ab"}, {file = "psycopg2_binary-2.9.9-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:2293b001e319ab0d869d660a704942c9e2cce19745262a8aba2115ef41a0a42a"}, {file = "psycopg2_binary-2.9.9-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:03ef7df18daf2c4c07e2695e8cfd5ee7f748a1d54d802330985a78d2a5a6dca9"}, {file = "psycopg2_binary-2.9.9-cp37-cp37m-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0a602ea5aff39bb9fac6308e9c9d82b9a35c2bf288e184a816002c9fae930b77"}, @@ -2293,6 +2318,7 @@ files = [ {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:69b023b2b4daa7548bcfbd4aa3da05b3a74b772db9e23b982788168117739938"}, {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = 
"sha256:81e0b275a9ecc9c0c0c07b4b90ba548307583c125f54d5b6946cfee6360c733d"}, {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ba336e390cd8e4d1739f42dfe9bb83a3cc2e80f567d8805e11b46f4a943f5515"}, + {file = "PyYAML-6.0.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:326c013efe8048858a6d312ddd31d56e468118ad4cdeda36c719bf5bb6192290"}, {file = "PyYAML-6.0.1-cp310-cp310-win32.whl", hash = "sha256:bd4af7373a854424dabd882decdc5579653d7868b8fb26dc7d0e99f823aa5924"}, {file = "PyYAML-6.0.1-cp310-cp310-win_amd64.whl", hash = "sha256:fd1592b3fdf65fff2ad0004b5e363300ef59ced41c2e6b3a99d4089fa8c5435d"}, {file = "PyYAML-6.0.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:6965a7bc3cf88e5a1c3bd2e0b5c22f8d677dc88a455344035f03399034eb3007"}, @@ -2300,8 +2326,16 @@ files = [ {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:42f8152b8dbc4fe7d96729ec2b99c7097d656dc1213a3229ca5383f973a5ed6d"}, {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:062582fca9fabdd2c8b54a3ef1c978d786e0f6b3a1510e0ac93ef59e0ddae2bc"}, {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d2b04aac4d386b172d5b9692e2d2da8de7bfb6c387fa4f801fbf6fb2e6ba4673"}, + {file = "PyYAML-6.0.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:e7d73685e87afe9f3b36c799222440d6cf362062f78be1013661b00c5c6f678b"}, {file = "PyYAML-6.0.1-cp311-cp311-win32.whl", hash = "sha256:1635fd110e8d85d55237ab316b5b011de701ea0f29d07611174a1b42f1444741"}, {file = "PyYAML-6.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:bf07ee2fef7014951eeb99f56f39c9bb4af143d8aa3c21b1677805985307da34"}, + {file = "PyYAML-6.0.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:855fb52b0dc35af121542a76b9a84f8d1cd886ea97c84703eaa6d88e37a2ad28"}, + {file = "PyYAML-6.0.1-cp312-cp312-macosx_11_0_arm64.whl", hash = 
"sha256:40df9b996c2b73138957fe23a16a4f0ba614f4c0efce1e9406a184b6d07fa3a9"}, + {file = "PyYAML-6.0.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a08c6f0fe150303c1c6b71ebcd7213c2858041a7e01975da3a99aed1e7a378ef"}, + {file = "PyYAML-6.0.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6c22bec3fbe2524cde73d7ada88f6566758a8f7227bfbf93a408a9d86bcc12a0"}, + {file = "PyYAML-6.0.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8d4e9c88387b0f5c7d5f281e55304de64cf7f9c0021a3525bd3b1c542da3b0e4"}, + {file = "PyYAML-6.0.1-cp312-cp312-win32.whl", hash = "sha256:d483d2cdf104e7c9fa60c544d92981f12ad66a457afae824d146093b8c294c54"}, + {file = "PyYAML-6.0.1-cp312-cp312-win_amd64.whl", hash = "sha256:0d3304d8c0adc42be59c5f8a4d9e3d7379e6955ad754aa9d6ab7a398b59dd1df"}, {file = "PyYAML-6.0.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:50550eb667afee136e9a77d6dc71ae76a44df8b3e51e41b77f6de2932bfe0f47"}, {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1fe35611261b29bd1de0070f0b2f47cb6ff71fa6595c077e42bd0c419fa27b98"}, {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:704219a11b772aea0d8ecd7058d0082713c3562b4e271b849ad7dc4a5c90c13c"}, @@ -2318,6 +2352,7 @@ files = [ {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a0cd17c15d3bb3fa06978b4e8958dcdc6e0174ccea823003a106c7d4d7899ac5"}, {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:28c119d996beec18c05208a8bd78cbe4007878c6dd15091efb73a30e90539696"}, {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7e07cbde391ba96ab58e532ff4803f79c4129397514e1413a7dc761ccd755735"}, + {file = "PyYAML-6.0.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:49a183be227561de579b4a36efbb21b3eab9651dd81b1858589f796549873dd6"}, {file = 
"PyYAML-6.0.1-cp38-cp38-win32.whl", hash = "sha256:184c5108a2aca3c5b3d3bf9395d50893a7ab82a38004c8f61c258d4428e80206"}, {file = "PyYAML-6.0.1-cp38-cp38-win_amd64.whl", hash = "sha256:1e2722cc9fbb45d9b87631ac70924c11d3a401b2d7f410cc0e3bbf249f2dca62"}, {file = "PyYAML-6.0.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:9eb6caa9a297fc2c2fb8862bc5370d0303ddba53ba97e71f08023b6cd73d16a8"}, @@ -2325,6 +2360,7 @@ files = [ {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5773183b6446b2c99bb77e77595dd486303b4faab2b086e7b17bc6bef28865f6"}, {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b786eecbdf8499b9ca1d697215862083bd6d2a99965554781d0d8d1ad31e13a0"}, {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bc1bf2925a1ecd43da378f4db9e4f799775d6367bdb94671027b73b393a7c42c"}, + {file = "PyYAML-6.0.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:04ac92ad1925b2cff1db0cfebffb6ffc43457495c9b3c39d3fcae417d7125dc5"}, {file = "PyYAML-6.0.1-cp39-cp39-win32.whl", hash = "sha256:faca3bdcf85b2fc05d06ff3fbc1f83e1391b3e724afa3feba7d13eeab355484c"}, {file = "PyYAML-6.0.1-cp39-cp39-win_amd64.whl", hash = "sha256:510c9deebc5c0225e8c96813043e62b680ba2f9c50a08d3724c7f28a747d1486"}, {file = "PyYAML-6.0.1.tar.gz", hash = "sha256:bfdf460b1736c775f2ba9f6a92bca30bc2095067b8a9d77876d1fad6cc3b4a43"}, @@ -2561,7 +2597,7 @@ files = [ name = "sniffio" version = "1.3.0" description = "Sniff out which async library your code is running under" -optional = true +optional = false python-versions = ">=3.7" files = [ {file = "sniffio-1.3.0-py3-none-any.whl", hash = "sha256:eecefdce1e5bbfb7ad2eeaabf7c1eeb404d7757c379bd1f7e5cce9d8bf425384"}, @@ -3123,4 +3159,4 @@ selenium = ["selenium"] [metadata] lock-version = "2.0" python-versions = ">=3.9,<4.0" -content-hash = "6c7e7a7b9269c6b071cbd92eaf4b7254b4b44b804179fec32aa66b43a1671bda" +content-hash = 
"f4cb027301e265217ccb581b0ddd06fe6d91319fbcfbc3d20504a1fdbc45d7b1" diff --git a/pyproject.toml b/pyproject.toml index 909d538e6..7afb4cd96 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,6 +1,6 @@ [tool.poetry] name = "testcontainers" -version = "4.0.0rc2" # auto-incremented by release-please +version = "4.0.0" # auto-incremented by release-please description = "Python library for throwaway instances of anything that can run in a Docker container" authors = ["Sergey Pirogov "] maintainers = [ @@ -115,6 +115,7 @@ pytest = "7.4.3" pytest-cov = "4.1.0" sphinx = "^7.2.6" twine = "^4.0.2" +anyio = "^4.3.0" [[tool.poetry.source]] name = "PyPI"