diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml
index 3d79f197..d6e09f61 100644
--- a/.github/workflows/ci.yml
+++ b/.github/workflows/ci.yml
@@ -4,7 +4,6 @@ on:
   push:
     branches:
       - main
-      - v0.23-branch
     tags:
       - '**'
   pull_request: {}
@@ -14,12 +13,12 @@ jobs:
     runs-on: ubuntu-latest

     steps:
-      - uses: actions/checkout@v2
+      - uses: actions/checkout@v4

       - name: set up python
-        uses: actions/setup-python@v4
+        uses: actions/setup-python@v5
         with:
-          python-version: '3.10'
+          python-version: '3.11'

       - run: pip install -r requirements/linting.txt -r requirements/pyproject.txt pre-commit
@@ -29,12 +28,12 @@ jobs:
     runs-on: ubuntu-latest

     steps:
-      - uses: actions/checkout@v2
+      - uses: actions/checkout@v4

       - name: set up python
-        uses: actions/setup-python@v4
+        uses: actions/setup-python@v5
         with:
-          python-version: '3.10'
+          python-version: '3.11'

       - run: pip install -r requirements/docs.txt -r requirements/pyproject.txt
       - run: pip install .
@@ -42,7 +41,7 @@ jobs:
       - run: make docs

       - name: Store docs site
-        uses: actions/upload-artifact@v3
+        uses: actions/upload-artifact@v4
         with:
           name: docs
           path: docs/_build/
@@ -53,13 +52,13 @@ jobs:
       fail-fast: false
       matrix:
         os: [ubuntu]
-        python: ['3.7', '3.8', '3.9', '3.10', '3.11']
+        python: ['3.8', '3.9', '3.10', '3.11', '3.12']
         redis: ['5']
         include:
-          - python: '3.10'
+          - python: '3.11'
             redis: '6'
             os: 'ubuntu'
-          - python: '3.10'
+          - python: '3.11'
             redis: '7'
             os: 'ubuntu'
@@ -67,7 +66,7 @@ jobs:
       PYTHON: ${{ matrix.python }}
       OS: ${{ matrix.os }}

-    runs-on: ${{ format('{0}-latest', matrix.os) }}
+    runs-on: ${{ matrix.os }}-latest

     services:
       redis:
@@ -77,10 +76,10 @@ jobs:
         options: --entrypoint redis-server

     steps:
-      - uses: actions/checkout@v2
+      - uses: actions/checkout@v4

       - name: set up python
-        uses: actions/setup-python@v4
+        uses: actions/setup-python@v5
         with:
           python-version: ${{ matrix.python }}
@@ -90,50 +89,61 @@ jobs:
       - run: coverage xml

-      - uses: codecov/codecov-action@v2
+      - uses: codecov/codecov-action@v4
         with:
           file: ./coverage.xml
           env_vars: PYTHON,OS

-  deploy:
-    name: Deploy
+  check:
+    if: always()
     needs: [lint, docs, test]
+    runs-on: ubuntu-latest
+
+    steps:
+      - name: Decide whether the needed jobs succeeded or failed
+        uses: re-actors/alls-green@release/v1
+        id: all-green
+        with:
+          jobs: ${{ toJSON(needs) }}
+
+  release:
+    name: Release
+    needs: [check]
     if: "success() && startsWith(github.ref, 'refs/tags/')"
     runs-on: ubuntu-latest
+    environment: release
+
+    permissions:
+      id-token: write

     steps:
-      - uses: actions/checkout@v2
+      - uses: actions/checkout@v4

       - name: get docs
-        uses: actions/download-artifact@v3
+        uses: actions/download-artifact@v4
         with:
           name: docs
           path: docs/_build/

       - name: set up python
-        uses: actions/setup-python@v4
+        uses: actions/setup-python@v5
         with:
-          python-version: '3.10'
+          python-version: '3.11'

       - name: install
-        run: pip install -U twine build packaging
+        run: pip install -U build

       - name: check version
         id: check-version
-        run: python <(curl -Ls https://gist.githubusercontent.com/samuelcolvin/4e1ad439c5489e8d6478cdee3eb952ef/raw/check_version.py)
-        env:
-          VERSION_PATH: 'arq/version.py'
+        uses: samuelcolvin/check-python-version@v3.2
+        with:
+          version_file_path: 'arq/version.py'

       - name: build
         run: python -m build

-      - run: twine check dist/*
-
-      - name: upload to pypi
-        run: twine upload dist/*
-        env:
-          TWINE_USERNAME: __token__
-          TWINE_PASSWORD: ${{ secrets.pypi_token }}
+      - name: Upload package to PyPI
+        uses: pypa/gh-action-pypi-publish@release/v1

       - name: publish docs
         if: '!fromJSON(steps.check-version.outputs.IS_PRERELEASE)'
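The release job now gets the tag-vs-version check from `samuelcolvin/check-python-version` rather than a script curl'd from a gist, and uploads via PyPI trusted publishing (the `release` environment plus `permissions: id-token: write`) instead of a stored `pypi_token` secret. Conceptually, the version check does something like the sketch below, which mirrors the Travis-era `tests/check_tag.py` deleted later in this diff; `GITHUB_REF_NAME` is the standard Actions variable holding the tag name, but the action's real implementation may differ:

```python
import os
import sys

from arq.version import VERSION

# For tag-triggered runs, GITHUB_REF_NAME holds the tag name, e.g. 'v0.26.0'.
git_tag = os.getenv('GITHUB_REF_NAME')
if git_tag and git_tag.lower().lstrip('v') != str(VERSION).lower():
    print(f'✖ git tag {git_tag!r} does not match arq.version {VERSION!r}')
    sys.exit(1)
```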
diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml
index aa774cdf..2d77e128 100644
--- a/.pre-commit-config.yaml
+++ b/.pre-commit-config.yaml
@@ -11,9 +11,9 @@ repos:

   - repo: local
     hooks:
-      - id: lint
-        name: Lint
-        entry: make lint
+      - id: format
+        name: Format
+        entry: make format
         types: [python]
         language: system
         pass_filenames: false
diff --git a/Makefile b/Makefile
index f0c2d044..f5417c05 100644
--- a/Makefile
+++ b/Makefile
@@ -1,24 +1,36 @@
 .DEFAULT_GOAL := all
-isort = isort arq tests
-black = black arq tests
+sources = arq tests

 .PHONY: install
 install:
-	pip install -U pip pre-commit
+	pip install -U pip pre-commit pip-tools
 	pip install -r requirements/all.txt
 	pip install -e .[watch]
 	pre-commit install

+.PHONY: refresh-lockfiles
+refresh-lockfiles:
+	find requirements/ -name '*.txt' ! -name 'all.txt' -type f -delete
+	make update-lockfiles
+
+.PHONY: update-lockfiles
+update-lockfiles:
+	@echo "Updating requirements/*.txt files using pip-compile"
+	pip-compile -q --strip-extras -o requirements/linting.txt requirements/linting.in
+	pip-compile -q --strip-extras -o requirements/testing.txt requirements/testing.in
+	pip-compile -q --strip-extras -o requirements/docs.txt requirements/docs.in
+	pip-compile -q --strip-extras -o requirements/pyproject.txt pyproject.toml --all-extras
+	pip install --dry-run -r requirements/all.txt
+
 .PHONY: format
 format:
-	$(isort)
-	$(black)
+	ruff check --fix $(sources)
+	ruff format $(sources)

 .PHONY: lint
 lint:
-	flake8 --max-complexity 10 --max-line-length 120 --ignore E203,W503 arq/ tests/
-	$(isort) --check-only --df
-	$(black) --check
+	ruff check $(sources)
+	ruff format --check $(sources)

 .PHONY: test
 test:
diff --git a/arq/connections.py b/arq/connections.py
index b3a91b48..ec11b8c7 100644
--- a/arq/connections.py
+++ b/arq/connections.py
@@ -4,7 +4,7 @@
 from dataclasses import dataclass
 from datetime import datetime, timedelta
 from operator import attrgetter
-from typing import TYPE_CHECKING, Any, Callable, List, Optional, Tuple, Union
+from typing import TYPE_CHECKING, Any, Callable, List, Optional, Tuple, Union, cast
 from urllib.parse import parse_qs, urlparse
 from uuid import uuid4

@@ -163,8 +163,8 @@ async def enqueue_job(
             job = serialize_job(function, args, kwargs, _job_try, enqueue_time_ms, serializer=self.job_serializer)
             pipe.multi()
-            pipe.psetex(job_key, expires_ms, job)  # type: ignore[no-untyped-call]
-            pipe.zadd(_queue_name, {job_id: score})  # type: ignore[unused-coroutine]
+            pipe.psetex(job_key, expires_ms, job)
+            pipe.zadd(_queue_name, {job_id: score})
             try:
                 await pipe.execute()
             except WatchError:
@@ -210,7 +210,7 @@ async def queued_jobs(self, *, queue_name: Optional[str] = None) -> List[JobDef]


 async def create_pool(
-    settings_: RedisSettings = None,
+    settings_: Optional[RedisSettings] = None,
     *,
     retry: int = 0,
     job_serializer: Optional[Serializer] = None,
@@ -237,7 +237,8 @@ def pool_factory(*args: Any, **kwargs: Any) -> ArqRedis:
                 ssl=settings.ssl,
                 **kwargs,
             )
-            return client.master_for(settings.sentinel_master, redis_class=ArqRedis)
+            redis = client.master_for(settings.sentinel_master, redis_class=ArqRedis)
+            return cast(ArqRedis, redis)

     else:
         pool_factory = functools.partial(
@@ -288,10 +289,10 @@ def pool_factory(*args: Any, **kwargs: Any) -> ArqRedis:

 async def log_redis_info(redis: 'Redis[bytes]', log_func: Callable[[str], Any]) -> None:
     async with redis.pipeline(transaction=False) as pipe:
-        pipe.info(section='Server')  # type: ignore[unused-coroutine]
-        pipe.info(section='Memory')  # type: ignore[unused-coroutine]
-        pipe.info(section='Clients')  # type: ignore[unused-coroutine]
-        pipe.dbsize()  # type: ignore[unused-coroutine]
+        pipe.info(section='Server')
+        pipe.info(section='Memory')
+        pipe.info(section='Clients')
+        pipe.dbsize()
         info_server, info_memory, info_clients, key_count = await pipe.execute()

     redis_version = info_server.get('redis_version', '?')
diff --git a/arq/jobs.py b/arq/jobs.py
index d0c0a5ef..15b7231e 100644
--- a/arq/jobs.py
+++ b/arq/jobs.py
@@ -83,7 +83,7 @@ def __init__(
         self._deserializer = _deserializer

     async def result(
-        self, timeout: Optional[float] = None, *, poll_delay: float = 0.5, pole_delay: float = None
+        self, timeout: Optional[float] = None, *, poll_delay: float = 0.5, pole_delay: Optional[float] = None
     ) -> Any:
         """
         Get the result of the job or, if the job raised an exception, reraise it.
@@ -103,8 +103,8 @@ async def result(
         async for delay in poll(poll_delay):
             async with self._redis.pipeline(transaction=True) as tr:
-                tr.get(result_key_prefix + self.job_id)  # type: ignore[unused-coroutine]
-                tr.zscore(self._queue_name, self.job_id)  # type: ignore[unused-coroutine]
+                tr.get(result_key_prefix + self.job_id)
+                tr.zscore(self._queue_name, self.job_id)
                 v, s = await tr.execute()

             if v:
@@ -154,9 +154,9 @@ async def status(self) -> JobStatus:
         Status of the job.
         """
         async with self._redis.pipeline(transaction=True) as tr:
-            tr.exists(result_key_prefix + self.job_id)  # type: ignore[unused-coroutine]
-            tr.exists(in_progress_key_prefix + self.job_id)  # type: ignore[unused-coroutine]
-            tr.zscore(self._queue_name, self.job_id)  # type: ignore[unused-coroutine]
+            tr.exists(result_key_prefix + self.job_id)
+            tr.exists(in_progress_key_prefix + self.job_id)
+            tr.zscore(self._queue_name, self.job_id)
             is_complete, is_in_progress, score = await tr.execute()

         if is_complete:
@@ -180,8 +180,8 @@ async def abort(self, *, timeout: Optional[float] = None, poll_delay: float = 0.
         job_info = await self.info()
         if job_info and job_info.score and job_info.score > timestamp_ms():
             async with self._redis.pipeline(transaction=True) as tr:
-                tr.zrem(self._queue_name, self.job_id)  # type: ignore[unused-coroutine]
-                tr.zadd(self._queue_name, {self.job_id: 1})  # type: ignore[unused-coroutine]
+                tr.zrem(self._queue_name, self.job_id)
+                tr.zadd(self._queue_name, {self.job_id: 1})
                 await tr.execute()

         await self._redis.zadd(abort_jobs_ss, {self.job_id: timestamp_ms()})
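The `# type: ignore[unused-coroutine]` comments dropped throughout `connections.py` and `jobs.py` were only ever needed because older redis-py type stubs annotated pipeline commands as coroutines. On a pipeline, command calls are buffered synchronously and return the pipeline itself; only `execute()` is awaited, in a single round trip. A minimal sketch of the pattern (the queue key is arq's default, the function name is illustrative):

```python
from redis.asyncio import Redis


async def queue_stats(redis: 'Redis[bytes]') -> None:
    async with redis.pipeline(transaction=True) as pipe:
        pipe.zcard('arq:queue')  # buffered; returns the pipeline, so no await
        pipe.dbsize()            # buffered as well
        queued, total = await pipe.execute()  # one round trip; results in call order
    print(f'{queued} queued jobs, {total} keys in the db')
```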
diff --git a/arq/typing.py b/arq/typing.py
index bde59053..945e3370 100644
--- a/arq/typing.py
+++ b/arq/typing.py
@@ -19,8 +19,8 @@

 if TYPE_CHECKING:
-    from .cron import CronJob  # noqa F401
-    from .worker import Function  # noqa F401
+    from .cron import CronJob
+    from .worker import Function

 OptionType = Union[None, Set[int], int]
 WEEKDAYS = 'mon', 'tues', 'wed', 'thurs', 'fri', 'sat', 'sun'
diff --git a/arq/worker.py b/arq/worker.py
index 2bdab0f0..4c33b677 100644
--- a/arq/worker.py
+++ b/arq/worker.py
@@ -41,7 +41,7 @@
 )

 if TYPE_CHECKING:
-    from .typing import SecondsTimedelta, StartupShutdown, WorkerCoroutine, WorkerSettingsType  # noqa F401
+    from .typing import SecondsTimedelta, StartupShutdown, WorkerCoroutine, WorkerSettingsType

 logger = logging.getLogger('arq.worker')
 no_result = object()
@@ -189,8 +189,8 @@ def __init__(
         *,
         queue_name: Optional[str] = default_queue_name,
         cron_jobs: Optional[Sequence[CronJob]] = None,
-        redis_settings: RedisSettings = None,
-        redis_pool: ArqRedis = None,
+        redis_settings: Optional[RedisSettings] = None,
+        redis_pool: Optional[ArqRedis] = None,
         burst: bool = False,
         on_startup: Optional['StartupShutdown'] = None,
         on_shutdown: Optional['StartupShutdown'] = None,
@@ -357,7 +357,7 @@ async def main(self) -> None:
         if self.on_startup:
             await self.on_startup(self.ctx)

-        async for _ in poll(self.poll_delay_s):  # noqa F841
+        async for _ in poll(self.poll_delay_s):
             await self._poll_iteration()

             if self.burst:
@@ -405,10 +405,8 @@ async def _cancel_aborted_jobs(self) -> None:
         Go through job_ids in the abort_jobs_ss sorted set and cancel those tasks.
         """
         async with self.pool.pipeline(transaction=True) as pipe:
-            pipe.zrange(abort_jobs_ss, start=0, end=-1)  # type: ignore[unused-coroutine]
-            pipe.zremrangebyscore(  # type: ignore[unused-coroutine]
-                abort_jobs_ss, min=timestamp_ms() + abort_job_max_age, max=float('inf')
-            )
+            pipe.zrange(abort_jobs_ss, start=0, end=-1)
+            pipe.zremrangebyscore(abort_jobs_ss, min=timestamp_ms() + abort_job_max_age, max=float('inf'))
             abort_job_ids, _ = await pipe.execute()

         aborted: Set[str] = set()
@@ -457,9 +455,7 @@ async def start_jobs(self, job_ids: List[bytes]) -> None:
                     continue

                 pipe.multi()
-                pipe.psetex(  # type: ignore[no-untyped-call]
-                    in_progress_key, int(self.in_progress_timeout_s * 1000), b'1'
-                )
+                pipe.psetex(in_progress_key, int(self.in_progress_timeout_s * 1000), b'1')
                 try:
                     await pipe.execute()
                 except (ResponseError, WatchError):
@@ -475,11 +471,11 @@ async def start_jobs(self, job_ids: List[bytes]) -> None:
     async def run_job(self, job_id: str, score: int) -> None:  # noqa: C901
         start_ms = timestamp_ms()
         async with self.pool.pipeline(transaction=True) as pipe:
-            pipe.get(job_key_prefix + job_id)  # type: ignore[unused-coroutine]
-            pipe.incr(retry_key_prefix + job_id)  # type: ignore[unused-coroutine]
-            pipe.expire(retry_key_prefix + job_id, 88400)  # type: ignore[unused-coroutine]
+            pipe.get(job_key_prefix + job_id)
+            pipe.incr(retry_key_prefix + job_id)
+            pipe.expire(retry_key_prefix + job_id, 88400)
             if self.allow_abort_jobs:
-                pipe.zrem(abort_jobs_ss, job_id)  # type: ignore[unused-coroutine]
+                pipe.zrem(abort_jobs_ss, job_id)
                 v, job_try, _, abort_job = await pipe.execute()
             else:
                 v, job_try, _ = await pipe.execute()
@@ -692,35 +688,35 @@ async def finish_job(
             if keep_in_progress is None:
                 delete_keys += [in_progress_key]
             else:
-                tr.pexpire(in_progress_key, to_ms(keep_in_progress))  # type: ignore[unused-coroutine]
+                tr.pexpire(in_progress_key, to_ms(keep_in_progress))

             if finish:
                 if result_data:
                     expire = None if keep_result_forever else result_timeout_s
-                    tr.set(result_key_prefix + job_id, result_data, px=to_ms(expire))  # type: ignore[unused-coroutine]
+                    tr.set(result_key_prefix + job_id, result_data, px=to_ms(expire))
                 delete_keys += [retry_key_prefix + job_id, job_key_prefix + job_id]
-                tr.zrem(abort_jobs_ss, job_id)  # type: ignore[unused-coroutine]
-                tr.zrem(self.queue_name, job_id)  # type: ignore[unused-coroutine]
+                tr.zrem(abort_jobs_ss, job_id)
+                tr.zrem(self.queue_name, job_id)
             elif incr_score:
-                tr.zincrby(self.queue_name, incr_score, job_id)  # type: ignore[unused-coroutine]
+                tr.zincrby(self.queue_name, incr_score, job_id)
             if delete_keys:
-                tr.delete(*delete_keys)  # type: ignore[unused-coroutine]
+                tr.delete(*delete_keys)
             await tr.execute()

     async def finish_failed_job(self, job_id: str, result_data: Optional[bytes]) -> None:
         async with self.pool.pipeline(transaction=True) as tr:
-            tr.delete(  # type: ignore[unused-coroutine]
+            tr.delete(
                 retry_key_prefix + job_id,
                 in_progress_key_prefix + job_id,
                 job_key_prefix + job_id,
             )
-            tr.zrem(abort_jobs_ss, job_id)  # type: ignore[unused-coroutine]
-            tr.zrem(self.queue_name, job_id)  # type: ignore[unused-coroutine]
+            tr.zrem(abort_jobs_ss, job_id)
+            tr.zrem(self.queue_name, job_id)
             # result_data would only be None if serializing the result fails
             keep_result = self.keep_result_forever or self.keep_result_s > 0
             if result_data is not None and keep_result:  # pragma: no branch
                 expire = 0 if self.keep_result_forever else self.keep_result_s
-                tr.set(result_key_prefix + job_id, result_data, px=to_ms(expire))  # type: ignore[unused-coroutine]
+                tr.set(result_key_prefix + job_id, result_data, px=to_ms(expire))
             await tr.execute()

     async def heart_beat(self) -> None:
@@ -883,7 +879,7 @@ def get_kwargs(settings_cls: 'WorkerSettingsType') -> Dict[str, NameError]:

 def create_worker(settings_cls: 'WorkerSettingsType', **kwargs: Any) -> Worker:
-    return Worker(**{**get_kwargs(settings_cls), **kwargs})  # type: ignore
+    return Worker(**{**get_kwargs(settings_cls), **kwargs})


 def run_worker(settings_cls: 'WorkerSettingsType', **kwargs: Any) -> Worker:
diff --git a/pyproject.toml b/pyproject.toml
index 7d88ada4..d05aedd2 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -24,11 +24,11 @@ classifiers = [
     'Programming Language :: Python',
     'Programming Language :: Python :: 3',
     'Programming Language :: Python :: 3 :: Only',
-    'Programming Language :: Python :: 3.7',
     'Programming Language :: Python :: 3.8',
     'Programming Language :: Python :: 3.9',
     'Programming Language :: Python :: 3.10',
     'Programming Language :: Python :: 3.11',
+    'Programming Language :: Python :: 3.12',
     'Topic :: Internet',
     'Topic :: Software Development :: Libraries :: Python Modules',
     'Topic :: System :: Clustering',
@@ -38,7 +38,7 @@ classifiers = [
 ]
 requires-python = '>=3.7'
 dependencies = [
-    'redis[hiredis]>=4.2.0',
+    'redis[hiredis]>=4.2.0,<5',
     'click>=8.0',
     'typing-extensions>=4.1.0',
 ]
@@ -76,39 +76,16 @@ exclude_lines = [
     '@overload',
 ]

-[tool.black]
-color = true
+[tool.ruff]
 line-length = 120
-target-version = ['py39']
-skip-string-normalization = true

-[tool.isort]
-line_length = 120
-known_third_party = 'foxglove'
-multi_line_output = 3
-include_trailing_comma = true
-force_grid_wrap = 0
-combine_as_imports = true
-color_output = true
+[tool.ruff.lint]
+extend-select = ['Q', 'RUF100', 'C90', 'UP', 'I']
+flake8-quotes = {inline-quotes = 'single', multiline-quotes = 'double'}
+mccabe = { max-complexity = 13 }

-[tool.mypy]
-show_error_codes = true
-follow_imports = 'silent'
-strict_optional = true
-warn_redundant_casts = true
-warn_unused_ignores = true
-disallow_any_generics = true
-check_untyped_defs = true
-no_implicit_reexport = true
-warn_unused_configs = true
-disallow_subclassing_any = true
-disallow_incomplete_defs = true
-disallow_untyped_decorators = true
-disallow_untyped_calls = true
-
-# for strict mypy: (this is the tricky one :-))
-disallow_untyped_defs = true
+[tool.ruff.format]
+quote-style = 'single'

-# remaining arguments from `mypy --strict` which cause errors
-#no_implicit_optional = true
-#warn_return_any = true
+[tool.mypy]
+strict = true
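`[tool.mypy] strict = true` replaces the hand-maintained list of strict flags. Strict mode disallows PEP 484 implicit `Optional`, which is why parameters such as `redis_settings: RedisSettings = None` gained explicit `Optional[...]` annotations in `connections.py` and `worker.py` above. A standalone illustration (not arq code):

```python
from typing import Optional


def implicit(timeout: float = None) -> None:  # rejected by mypy --strict
    ...


def explicit(timeout: Optional[float] = None) -> None:  # accepted
    ...
```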
diff --git a/requirements/docs.txt b/requirements/docs.txt
index 7473b36b..2ce9f8c2 100644
--- a/requirements/docs.txt
+++ b/requirements/docs.txt
@@ -1,52 +1,48 @@
 #
-# This file is autogenerated by pip-compile with python 3.11
-# To update, run:
+# This file is autogenerated by pip-compile with Python 3.12
+# by the following command:
 #
-#    pip-compile --output-file=requirements/docs.txt requirements/docs.in
+#    pip-compile --output-file=requirements/docs.txt --strip-extras requirements/docs.in
 #
-alabaster==0.7.12
+alabaster==0.7.16
     # via sphinx
-babel==2.10.3
+babel==2.14.0
     # via sphinx
-certifi==2022.12.7
+certifi==2024.2.2
     # via requests
-charset-normalizer==2.1.1
+charset-normalizer==3.3.2
     # via requests
 docutils==0.19
     # via sphinx
-idna==3.3
+idna==3.6
     # via requests
 imagesize==1.4.1
     # via sphinx
-jinja2==3.0.3
+jinja2==3.1.3
     # via sphinx
-markupsafe==2.1.1
+markupsafe==2.1.5
     # via jinja2
-packaging==21.3
+packaging==24.0
     # via sphinx
-pygments==2.13.0
+pygments==2.17.2
     # via sphinx
-pyparsing==3.0.9
-    # via packaging
-pytz==2022.2.1
-    # via babel
-requests==2.28.1
+requests==2.31.0
     # via sphinx
 snowballstemmer==2.2.0
     # via sphinx
-sphinx==5.1.1
+sphinx==5.3.0
     # via -r requirements/docs.in
-sphinxcontrib-applehelp==1.0.2
+sphinxcontrib-applehelp==1.0.8
     # via sphinx
-sphinxcontrib-devhelp==1.0.2
+sphinxcontrib-devhelp==1.0.6
     # via sphinx
-sphinxcontrib-htmlhelp==2.0.0
+sphinxcontrib-htmlhelp==2.0.5
     # via sphinx
 sphinxcontrib-jsmath==1.0.1
     # via sphinx
-sphinxcontrib-qthelp==1.0.3
+sphinxcontrib-qthelp==1.0.7
     # via sphinx
-sphinxcontrib-serializinghtml==1.1.5
+sphinxcontrib-serializinghtml==1.1.10
     # via sphinx
-urllib3==1.26.12
+urllib3==2.2.1
     # via requests
diff --git a/requirements/linting.in b/requirements/linting.in
index ae67c86a..72cdaa0d 100644
--- a/requirements/linting.in
+++ b/requirements/linting.in
@@ -1,7 +1,4 @@
-black>=22,<23
-flake8>=5,<6
-flake8-quotes>=3,<4
-isort[colors]>=5,<6
-mypy<1
+ruff
+mypy
 types-pytz
-types_redis>=4.2,<4.3
+types_redis
diff --git a/requirements/linting.txt b/requirements/linting.txt
index faf0a6ba..873ee0c1 100644
--- a/requirements/linting.txt
+++ b/requirements/linting.txt
@@ -1,42 +1,28 @@
 #
-# This file is autogenerated by pip-compile with python 3.11
-# To update, run:
+# This file is autogenerated by pip-compile with Python 3.12
+# by the following command:
 #
-#    pip-compile --output-file=requirements/linting.txt requirements/linting.in
+#    pip-compile --output-file=requirements/linting.txt --strip-extras requirements/linting.in
 #
-black==22.6.0
-    # via -r requirements/linting.in
-click==8.1.3
-    # via black
-colorama==0.4.5
-    # via isort
-flake8==5.0.4
+cffi==1.16.0
+    # via cryptography
+cryptography==42.0.5
     # via
-    #   -r requirements/linting.in
-    #   flake8-quotes
-flake8-quotes==3.3.1
-    # via -r requirements/linting.in
-isort[colors]==5.10.1
+    #   types-pyopenssl
+    #   types-redis
+mypy==1.9.0
     # via -r requirements/linting.in
-mccabe==0.7.0
-    # via flake8
-mypy==0.971
+mypy-extensions==1.0.0
+    # via mypy
+pycparser==2.22
+    # via cffi
+ruff==0.3.4
     # via -r requirements/linting.in
-mypy-extensions==0.4.3
-    # via
-    #   black
-    #   mypy
-pathspec==0.9.0
-    # via black
-platformdirs==2.5.2
-    # via black
-pycodestyle==2.9.1
-    # via flake8
-pyflakes==2.5.0
-    # via flake8
-types-pytz==2022.2.1.0
+types-pyopenssl==24.0.0.20240311
+    # via types-redis
+types-pytz==2024.1.0.20240203
     # via -r requirements/linting.in
-types-redis==4.2.8
+types-redis==4.6.0.20240311
     # via -r requirements/linting.in
-typing-extensions==4.3.0
+typing-extensions==4.10.0
     # via mypy
diff --git a/requirements/pyproject.txt b/requirements/pyproject.txt
index c2c38af6..17a8bb5a 100644
--- a/requirements/pyproject.txt
+++ b/requirements/pyproject.txt
@@ -1,24 +1,22 @@
 #
-# This file is autogenerated by pip-compile with python 3.11
-# To update, run:
+# This file is autogenerated by pip-compile with Python 3.12
+# by the following command:
 #
-#    pip-compile --extra=watch --output-file=requirements/pyproject.txt pyproject.toml
+#    pip-compile --all-extras --output-file=requirements/pyproject.txt --strip-extras pyproject.toml
 #
-anyio==3.6.1
+anyio==4.3.0
     # via watchfiles
-async-timeout==4.0.2
-    # via redis
-click==8.1.3
+click==8.1.7
     # via arq (pyproject.toml)
-hiredis==2.1.0
+hiredis==2.3.2
     # via redis
-idna==3.3
+idna==3.6
     # via anyio
-redis[hiredis]==4.4.0
+redis==4.6.0
     # via arq (pyproject.toml)
-sniffio==1.2.0
+sniffio==1.3.1
     # via anyio
-typing-extensions==4.3.0
+typing-extensions==4.10.0
     # via arq (pyproject.toml)
-watchfiles==0.16.1
+watchfiles==0.21.0
     # via arq (pyproject.toml)
diff --git a/requirements/testing.in b/requirements/testing.in
index 5a32ec5d..eb019d9d 100644
--- a/requirements/testing.in
+++ b/requirements/testing.in
@@ -1,11 +1,10 @@
-coverage[toml]>=6,<7
-dirty-equals>=0.4,<1
-msgpack>=1,<2
-pydantic>=1.9.2,<2
-pytest>=7,<8
-pytest-asyncio>=0.20.3
-pytest-mock>=3,<4
-pytest-sugar>=0.9,<1
-pytest-timeout>=2,<3
+coverage[toml]
+dirty-equals
+msgpack
+pydantic
+pytest
+pytest-asyncio
+pytest-mock
+pytest-pretty
+pytest-timeout
 pytz
-redislite
diff --git a/requirements/testing.txt b/requirements/testing.txt
index a639ab8a..77e186a6 100644
--- a/requirements/testing.txt
+++ b/requirements/testing.txt
@@ -1,64 +1,55 @@
 #
-# This file is autogenerated by pip-compile with python 3.11
-# To update, run:
+# This file is autogenerated by pip-compile with Python 3.12
+# by the following command:
 #
-#    pip-compile --output-file=requirements/testing.txt requirements/testing.in
+#    pip-compile --output-file=requirements/testing.txt --strip-extras requirements/testing.in
 #
-async-timeout==4.0.2
-    # via redis
-attrs==22.1.0
-    # via pytest
-coverage[toml]==6.4.4
+annotated-types==0.6.0
+    # via pydantic
+coverage==7.4.4
     # via -r requirements/testing.in
-dirty-equals==0.4
+dirty-equals==0.7.1.post0
     # via -r requirements/testing.in
-iniconfig==1.1.1
+iniconfig==2.0.0
     # via pytest
-msgpack==1.0.4
+markdown-it-py==3.0.0
+    # via rich
+mdurl==0.1.2
+    # via markdown-it-py
+msgpack==1.0.8
     # via -r requirements/testing.in
-packaging==21.3
-    # via
-    #   pytest
-    #   pytest-sugar
-pluggy==1.0.0
+packaging==24.0
     # via pytest
-psutil==5.9.1
-    # via redislite
-py==1.11.0
+pluggy==1.4.0
     # via pytest
-pydantic==1.9.2
+pydantic==2.6.4
     # via -r requirements/testing.in
-pyparsing==3.0.9
-    # via packaging
-pytest==7.1.2
+pydantic-core==2.16.3
+    # via pydantic
+pygments==2.17.2
+    # via rich
+pytest==8.1.1
     # via
     #   -r requirements/testing.in
     #   pytest-asyncio
     #   pytest-mock
-    #   pytest-sugar
+    #   pytest-pretty
     #   pytest-timeout
-pytest-asyncio==0.20.3
+pytest-asyncio==0.23.6
     # via -r requirements/testing.in
-pytest-mock==3.8.2
+pytest-mock==3.14.0
     # via -r requirements/testing.in
-pytest-sugar==0.9.5
+pytest-pretty==1.2.0
     # via -r requirements/testing.in
-pytest-timeout==2.1.0
+pytest-timeout==2.3.1
     # via -r requirements/testing.in
-pytz==2022.2.1
+pytz==2024.1
     # via
     #   -r requirements/testing.in
     #   dirty-equals
-redis==4.4.0
-    # via redislite
-redislite==6.2.805324
-    # via -r requirements/testing.in
-termcolor==1.1.0
-    # via pytest-sugar
-tomli==2.0.1
-    # via pytest
-typing-extensions==4.3.0
-    # via pydantic
-
-# The following packages are considered to be unsafe in a requirements file:
-# setuptools
+rich==13.7.1
+    # via pytest-pretty
+typing-extensions==4.10.0
+    # via
+    #   pydantic
+    #   pydantic-core
diff --git a/tests/check_tag.py b/tests/check_tag.py
deleted file mode 100755
index 210b9bbc..00000000
--- a/tests/check_tag.py
+++ /dev/null
@@ -1,15 +0,0 @@
-#!/usr/bin/env python3
-import os
-import sys
-
-from arq.version import VERSION
-
-git_tag = os.getenv('TRAVIS_TAG')
-if git_tag:
-    if git_tag.lower().lstrip('v') != str(VERSION).lower():
-        print('✖ "TRAVIS_TAG" environment variable does not match arq.version: "%s" vs. "%s"' % (git_tag, VERSION))
-        sys.exit(1)
-    else:
-        print('✓ "TRAVIS_TAG" environment variable matches arq.version: "%s" vs. "%s"' % (git_tag, VERSION))
-else:
-    print('✓ "TRAVIS_TAG" not defined')
diff --git a/tests/conftest.py b/tests/conftest.py
index 755aeec6..3b050be5 100644
--- a/tests/conftest.py
+++ b/tests/conftest.py
@@ -5,7 +5,6 @@
 import msgpack
 import pytest
-from redislite import Redis

 from arq.connections import ArqRedis, create_pool
 from arq.worker import Worker
@@ -31,13 +30,6 @@ async def arq_redis(loop):
     await redis_.close(close_connection_pool=True)


-@pytest.fixture
-async def unix_socket_path(loop, tmp_path):
-    rdb = Redis(str(tmp_path / 'redis_test.db'))
-    yield rdb.socket_file
-    rdb.close()
-
-
 @pytest.fixture
 async def arq_redis_msgpack(loop):
     redis_ = ArqRedis(
diff --git a/tests/test_jobs.py b/tests/test_jobs.py
index f8f6c8c4..c30113d7 100644
--- a/tests/test_jobs.py
+++ b/tests/test_jobs.py
@@ -110,17 +110,6 @@ async def foobar(ctx, *args, **kwargs):
     ]


-async def test_enqueue_job_with_unix_socket(worker, unix_socket_path):
-    """Test initializing arq_redis using a unix socket connection, and the worker using it."""
-    settings = RedisSettings(unix_socket_path=unix_socket_path)
-    arq_redis = await create_pool(settings, default_queue_name='socket_queue')
-    await test_enqueue_job(
-        arq_redis,
-        lambda functions, **_: worker(functions=functions, arq_redis=arq_redis, queue_name=None),
-        queue_name=None,
-    )
-
-
 async def test_enqueue_job_alt_queue(arq_redis: ArqRedis, worker):
     await test_enqueue_job(arq_redis, worker, queue_name='custom_queue')
diff --git a/tests/test_utils.py b/tests/test_utils.py
index e499d85f..997c137d 100644
--- a/tests/test_utils.py
+++ b/tests/test_utils.py
@@ -4,7 +4,7 @@
 from datetime import timedelta

 import pytest
-from pydantic import BaseModel, validator
+from pydantic import BaseModel, field_validator
 from redis.asyncio import ConnectionError, ResponseError

 import arq.typing
@@ -112,7 +112,7 @@ def test_redis_settings_validation():
     class Settings(BaseModel):
         redis_settings: RedisSettings

-        @validator('redis_settings', always=True, pre=True)
+        @field_validator('redis_settings', mode='before')
         def parse_redis_settings(cls, v):
             if isinstance(v, str):
                 return RedisSettings.from_dsn(v)
@@ -129,7 +129,7 @@ def parse_redis_settings(cls, v):
     assert s2.redis_settings.host == 'testing.com'
     assert s2.redis_settings.port == 6379

-    with pytest.raises(ValueError, match='1 validation error for Settings\nredis_settings -> ssl'):
+    with pytest.raises(ValueError, match='1 validation error for Settings\nredis_settings.ssl'):
         Settings(redis_settings={'ssl': 123})

     s3 = Settings(redis_settings={'ssl': True})
diff --git a/tests/test_worker.py b/tests/test_worker.py
index 23dd91d2..192a0d87 100644
--- a/tests/test_worker.py
+++ b/tests/test_worker.py
@@ -4,7 +4,7 @@
 import re
 import signal
 import sys
-from datetime import datetime, timedelta
+from datetime import datetime, timedelta, timezone
 from unittest.mock import MagicMock

 import msgpack
@@ -760,8 +760,12 @@ async def foo(ctx, v):
     assert worker.jobs_complete == 1
     assert worker.jobs_retried == 0
     assert worker.jobs_failed == 0
-    assert 'foo(1)' in caplog.text
-    assert 'foo(2)' not in caplog.text
+    # either foo(1) or foo(2) can be run, but not both
+    if 'foo(1)' in caplog.text:
+        assert 'foo(2)' not in caplog.text
+    else:
+        assert 'foo(2)' in caplog.text
+        assert 'foo(1)' not in caplog.text


 async def test_max_bursts_dont_get(arq_redis: ArqRedis, worker):
@@ -879,7 +883,9 @@ async def longfunc(ctx):

     caplog.set_level(logging.INFO)

-    job = await arq_redis.enqueue_job('longfunc', _job_id='testing', _defer_until=datetime.utcnow() + timedelta(days=1))
+    job = await arq_redis.enqueue_job(
+        'longfunc', _job_id='testing', _defer_until=datetime.now(timezone.utc) + timedelta(days=1)
+    )

     worker: Worker = worker(functions=[func(longfunc, name='longfunc')], allow_abort_jobs=True, poll_delay=0.1)
     assert worker.jobs_complete == 0
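`datetime.utcnow()` returns a naive datetime and is deprecated as of Python 3.12, which this PR adds to the test matrix; `datetime.now(timezone.utc)` is the timezone-aware replacement. For illustration:

```python
from datetime import datetime, timezone

naive = datetime.utcnow()           # tzinfo is None; deprecated since Python 3.12
aware = datetime.now(timezone.utc)  # timezone-aware equivalent

print(naive.tzinfo, aware.tzinfo)   # None UTC
```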