Skip to content

Commit

Permalink
Merge branch 'main' into master
Browse files Browse the repository at this point in the history
  • Loading branch information
balint-backmaker authored May 9, 2023
2 parents 9651c10 + 8a3a958 commit f4c3ce4
Show file tree
Hide file tree
Showing 15 changed files with 1,105 additions and 109 deletions.
6 changes: 3 additions & 3 deletions .github/PULL_REQUEST_TEMPLATE/new_container.md
Original file line number Diff line number Diff line change
Expand Up @@ -2,7 +2,7 @@ You have implemented a new container and would like to contribute it? Great! Her

- [ ] Create a new feature directory and populate it with the package structure [described in the documentation](https://testcontainers-python.readthedocs.io/en/latest/#package-structure). Copying one of the existing features is likely the best way to get started.
- [ ] Implement the new feature (typically in `__init__.py`) and corresponding tests.
- [ ] Add a line `-e file:[feature name]` to `requirements.in` and run `make requirements`. This command will find any new requirements and generate lock files to ensure reproducible builds (see the [pip-tools documentation](https://pip-tools.readthedocs.io/en/latest/) for details). Then run `pip install -r requirements/[your python version].txt` to install the new requirements.
- [ ] Update the feature `README.rst` and add it to the table of contents (`toctree` directive) in the top-level `README.rst`.
- [ ] Add a line `[feature name]` to the list of components in the GitHub Action workflow in `.github/workflows/main.yml` to run tests, build, and publish your package when pushed to the `master` branch.
- [ ] Rebase your development branch on `master` (or merge `master` into your development branch).
- [ ] Add a line `[feature name]` to the list of components in the GitHub Action workflow in `.github/workflows/main.yml` to run tests, build, and publish your package when pushed to the `main` branch.
- [ ] Rebase your development branch on `main` (or merge `main` into your development branch).
- [ ] Add a line `-e file:[feature name]` to `requirements.in` and open a pull request. Opening a pull request will automatically generate lock files to ensure reproducible builds (see the [pip-tools documentation](https://pip-tools.readthedocs.io/en/latest/) for details). Finally, run `python get_requirements.py --pr=[your PR number]` to fetch the updated requirement files (the build needs to have succeeded).
8 changes: 4 additions & 4 deletions .github/workflows/docs.yml
Original file line number Diff line number Diff line change
@@ -1,9 +1,9 @@
name: testcontainers documentation
on:
push:
branches: [master]
branches: [main]
pull_request:
branches: [master]
branches: [main]

jobs:
build:
Expand All @@ -15,10 +15,10 @@ jobs:
with:
python-version: "3.10"
cache: pip
cache-dependency-path: requirements/3.10.txt
cache-dependency-path: requirements/ubuntu-latest-3.10.txt
- name: Install Python dependencies
run: |
pip install --upgrade pip
pip install -r requirements/3.10.txt
pip install -r requirements/ubuntu-latest-3.10.txt
- name: Build documentation
run: make docs
82 changes: 44 additions & 38 deletions .github/workflows/main.yml
Original file line number Diff line number Diff line change
@@ -1,57 +1,62 @@
name: testcontainers packages
on:
push:
branches: [master]
branches: [main]
pull_request:
branches: [master]
branches: [main]

jobs:
build:
strategy:
matrix:
python-version:
- "3.7"
- "3.8"
- "3.9"
- "3.10"
- "3.11"
runtime:
- machine: ubuntu-latest
python-version: "3.7"
- machine: ubuntu-latest
python-version: "3.8"
- machine: ubuntu-latest
python-version: "3.9"
- machine: ubuntu-latest
python-version: "3.10"
- machine: ubuntu-latest
python-version: "3.11"
component:
- arangodb
- azurite
- clickhouse
- compose
- core
- elasticsearch
- google
- kafka
- keycloak
- localstack
- meta
- minio
- mongodb
- mssql
- mysql
- neo4j
- nginx
- opensearch
- oracle
- postgres
- rabbitmq
- redis
- selenium
runs-on: ubuntu-latest
- arangodb
- azurite
- clickhouse
- compose
- core
- elasticsearch
- google
- kafka
- keycloak
- localstack
- meta
- minio
- mongodb
- mssql
- mysql
- neo4j
- nginx
- opensearch
- oracle
- postgres
- rabbitmq
- redis
- selenium
runs-on: ${{ matrix.runtime.machine }}
steps:
- uses: actions/checkout@v3
- name: Setup python ${{ matrix.python-version }}
- name: Setup python ${{ matrix.runtime.python-version }}
uses: actions/setup-python@v4
with:
python-version: ${{ matrix.python-version }}
python-version: ${{ matrix.runtime.python-version }}
cache: pip
cache-dependency-path: ${{ format('requirements/{0}.txt', matrix.python-version) }}
cache-dependency-path: ${{ format('requirements/{0}-{1}.txt', matrix.runtime.machine, matrix.runtime.python-version) }}
- name: Install Python dependencies
run: |
pip install --upgrade pip
pip install -r requirements/${{ matrix.python-version }}.txt
pip install -r requirements/${{ matrix.runtime.machine }}-${{ matrix.runtime.python-version }}.txt
- name: Run docker diagnostics
if: matrix.component == 'core'
run: |
Expand All @@ -76,9 +81,10 @@ jobs:
- name: Upload the package to pypi
if: >
github.event_name == 'push'
&& github.ref == 'refs/heads/master'
&& github.ref == 'refs/heads/main'
&& github.repository_owner == 'testcontainers'
&& matrix.python-version == '3.10'
&& matrix.runtime.python-version == '3.10'
&& matrix.runtime.machine == 'ubuntu-latest'
env:
TWINE_USERNAME: __token__
TWINE_PASSWORD: ${{ secrets.PYPI_TOKEN }}
Expand Down
40 changes: 24 additions & 16 deletions .github/workflows/requirements.yml
Original file line number Diff line number Diff line change
@@ -1,35 +1,43 @@
name: testcontainers requirements
on:
push:
branches: [master]
branches: [main]
pull_request:
branches: [master]
branches: [main]

jobs:
requirements:
strategy:
fail-fast: false
matrix:
python-version:
- "3.7"
- "3.8"
- "3.9"
- "3.10"
- "3.11"
runs-on: ubuntu-latest
runtime:
- machine: ubuntu-latest
python-version: "3.7"
- machine: ubuntu-latest
python-version: "3.8"
- machine: ubuntu-latest
python-version: "3.9"
- machine: ubuntu-latest
python-version: "3.10"
- machine: ubuntu-latest
python-version: "3.11"
- machine: windows-latest
python-version: "3.10"
- machine: macos-latest
python-version: "3.10"
runs-on: ${{ matrix.runtime.machine }}
steps:
- uses: actions/checkout@v3
- name: Setup python ${{ matrix.python-version }}
- name: Setup python ${{ matrix.runtime.python-version }}
uses: actions/setup-python@v4
with:
python-version: ${{ matrix.python-version }}
python-version: ${{ matrix.runtime.python-version }}
- name: Update pip and install pip-tools
run: pip install --upgrade pip pip-tools
- name: Build requirements
run: |
rm requirements/${{ matrix.python-version }}.txt
pip-compile --resolver=backtracking -v --upgrade -o requirements/${{ matrix.python-version }}.txt
run: pip-compile --resolver=backtracking -v --upgrade -o requirements.txt
- name: Store requirements as artifact
uses: actions/upload-artifact@v3
with:
name: requirements-${{ matrix.python-version }}.txt
path: requirements/${{ matrix.python-version }}.txt
name: requirements-${{ matrix.runtime.machine }}-${{ matrix.runtime.python-version }}.txt
path: requirements.txt
1 change: 1 addition & 0 deletions .gitignore
Original file line number Diff line number Diff line change
Expand Up @@ -72,3 +72,4 @@ venv
.DS_Store
.python-version
.env
.github-token
12 changes: 2 additions & 10 deletions Makefile
Original file line number Diff line number Diff line change
@@ -1,7 +1,6 @@
PYTHON_VERSIONS = 3.7 3.8 3.9 3.10 3.11
PYTHON_VERSION ?= 3.10
IMAGE = testcontainers-python:${PYTHON_VERSION}
REQUIREMENTS = $(addprefix requirements/,${PYTHON_VERSIONS:=.txt})
RUN = docker run --rm -it
# Get all directories that contain a setup.py and get the directory name.
PACKAGES = $(subst /,,$(dir $(wildcard */setup.py)))
Expand All @@ -26,7 +25,7 @@ ${DISTRIBUTIONS} : %/dist : %/setup.py
# Targets to run the test suite for each package.
tests : ${TESTS}
${TESTS} : %/tests :
pytest -svx --cov-report=term-missing --cov=testcontainers.$* --tb=short $*/tests
pytest -svx --cov-report=term-missing --cov=testcontainers.$* --tb=short --strict-markers $*/tests

# Targets to lint the code.
lint : ${LINT}
Expand All @@ -43,7 +42,7 @@ ${UPLOAD} : %/upload :
fi

# Targets to build docker images
image: requirements/${PYTHON_VERSION}.txt
image: requirements/ubuntu-latest-${PYTHON_VERSION}.txt
docker build --build-arg version=${PYTHON_VERSION} -t ${IMAGE} .

# Targets to run tests in docker containers
Expand All @@ -63,13 +62,6 @@ doctest : ${DOCTESTS}
${DOCTESTS} : %/doctest :
sphinx-build -b doctest -c doctests $* docs/_build

# Targets to build requirement files
requirements : ${REQUIREMENTS}
${REQUIREMENTS} : requirements/%.txt : requirements.in */setup.py
mkdir -p $(dir $@)
${RUN} -w /workspace -v `pwd`:/workspace --platform=linux/amd64 python:$* bash -c \
"pip install pip-tools && pip-compile --resolver=backtracking -v --upgrade -o $@ $<"

# Remove any generated files.
clean :
rm -rf docs/_build
Expand Down
9 changes: 1 addition & 8 deletions README.rst
Original file line number Diff line number Diff line change
Expand Up @@ -108,11 +108,4 @@ Testcontainers is a collection of `implicit namespace packages <https://peps.pyt
Contributing a New Feature
^^^^^^^^^^^^^^^^^^^^^^^^^^

You want to contribute a new feature or container? Great! You can do that in six steps.

1. Create a new feature directory and populate it with the [package structure]_ as described above. Copying one of the existing features is likely the best way to get started.
2. Implement the new feature (typically in :code:`__init__.py`) and corresponding tests.
3. Add a line :code:`-e file:[feature name]` to :code:`requirements.in` and run :code:`make requirements`. This command will find any new requirements and generate lock files to ensure reproducible builds (see the `pip-tools <https://pip-tools.readthedocs.io/en/latest/>`__ documentation for details). Then run :code:`pip install -r requirements/[your python version].txt` to install the new requirements.
4. Update the feature :code:`README.rst` and add it to the table of contents (:code:`toctree` directive) in the top-level :code:`README.rst`.
5. Add a line :code:`[feature name]` to the list of components in the GitHub Action workflow in :code:`.github/workflows/main.yml` to run tests, build, and publish your package when pushed to the :code:`master` branch.
6. Rebase your development branch on :code:`master` (or merge :code:`master` into your development branch).
You want to contribute a new feature or container? Great! You can do that in six steps as outlined `here <https://github.com/testcontainers/testcontainers-python/blob/main/.github/PULL_REQUEST_TEMPLATE/new_container.md>`__.
94 changes: 94 additions & 0 deletions get_requirements.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,94 @@
import argparse
import io
import pathlib
import requests
import shutil
import tempfile
import zipfile


def __main__() -> None:
    """Fetch the requirement lock files produced by the `requirements.yml` GitHub
    Actions workflow and store them in the local `requirements/` directory.

    The workflow run to fetch artifacts from is identified by, in order of
    precedence: an explicit ``--run`` id, the head commit of ``--pr``, or the
    head commit of ``--branch`` (default ``main``).
    """
    parser = argparse.ArgumentParser()
    parser.add_argument("--owner", default="testcontainers")
    parser.add_argument("--repo", default="testcontainers-python")
    parser.add_argument("--run", help="GitHub Action run id")
    parser.add_argument("--pr", help="GitHub PR number")
    parser.add_argument("--branch", default="main")
    parser.add_argument("--token", help="GitHub authentication token")
    args = parser.parse_args()

    # Get an access token: CLI flag first, then cached `.github-token` file,
    # then interactive prompt (optionally caching the answer).
    if args.token:
        token = args.token
    elif (path := pathlib.Path(".github-token")).is_file():
        token = path.read_text().strip()
    else:
        token = input("we need a GitHub access token to fetch the requirements; please visit "
                      "https://github.com/settings/tokens/new, create a token with `public_repo` "
                      "scope, and paste it here: ").strip()
        cache = input("do you want to cache the token in a `.github-token` file [Ny]? ")
        if cache.lower().startswith("y"):
            path.write_text(token)

    headers = {
        "Authorization": f"Bearer {token}",
    }
    base_url = f"https://api.github.com/repos/{args.owner}/{args.repo}"

    if args.run:  # Run id was specified; use it directly.
        run = args.run
    else:
        # Resolve the head commit sha (from the PR or the branch), then find the
        # matching workflow run. The original version performed this lookup even
        # when --run was given, which raised a NameError on the undefined
        # `head_sha` and clobbered the explicit run id.
        if args.pr:  # PR was specified; get its most recent commit.
            print(f"fetching most recent commit for PR #{args.pr}")
            response = requests.get(f"{base_url}/pulls/{args.pr}", headers=headers)
            response.raise_for_status()
            head_sha = response.json()["head"]["sha"]
        else:  # Nothing was specified; get the most recent commit on the branch.
            print(f"fetching most recent commit for branch `{args.branch}`")
            response = requests.get(f"{base_url}/branches/{args.branch}", headers=headers)
            response.raise_for_status()
            head_sha = response.json()["commit"]["sha"]

        # List all successful runs for that commit and find the unique one
        # generated by the requirements workflow.
        response = requests.get(f"{base_url}/actions/runs", headers=headers, params={
            "head_sha": head_sha,
            "status": "success",
        })
        response.raise_for_status()
        runs = [candidate for candidate in response.json()["workflow_runs"]
                if candidate["path"].endswith("requirements.yml")]
        if len(runs) != 1:
            raise RuntimeError(f"could not identify unique workflow run: {runs}")
        run = runs[0]["id"]

    # List all artifacts attached to the run.
    print(f"fetching artifacts for run {run} ...")
    url = f"{base_url}/actions/runs/{run}/artifacts"
    response = requests.get(url, headers=headers)
    response.raise_for_status()
    artifacts = response.json()["artifacts"]
    print(f"discovered {len(artifacts)} artifacts")

    # Download each artifact archive (a zip containing `requirements.txt`) and
    # move the extracted file into `requirements/` under the artifact's name
    # (with the leading "requirements-" prefix stripped).
    for artifact in artifacts:
        name: str = artifact["name"]
        name = name.removeprefix("requirements-")
        print(f"fetching artifact {name} ...")
        response = requests.get(artifact["archive_download_url"], headers=headers)
        response.raise_for_status()
        with zipfile.ZipFile(io.BytesIO(response.content)) as zip, \
                tempfile.TemporaryDirectory() as tempdir:
            zip.extract("requirements.txt", tempdir)
            shutil.move(pathlib.Path(tempdir) / "requirements.txt",
                        pathlib.Path("requirements") / name)

    print("done")


if __name__ == "__main__":
    __main__()
Loading

0 comments on commit f4c3ce4

Please sign in to comment.