diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml
index 4dcef70c..f50fdf59 100644
--- a/.pre-commit-config.yaml
+++ b/.pre-commit-config.yaml
@@ -7,7 +7,7 @@ repos:
- id: check-toml
- repo: https://github.com/charliermarsh/ruff-pre-commit
- rev: v0.3.0
+ rev: v0.3.2
hooks:
- id: ruff
args: [--fix, --exit-non-zero-on-fix]
diff --git a/CHANGELOG.md b/CHANGELOG.md
index 858d5daf..00014f05 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -8,6 +8,22 @@ Collect fragments into this file with: scriv collect --version X.Y.Z
+
+
+## 0.10.0 (2024-03-13)
+
+### New features
+
+- We've adopted Safir's `safir.fastapi.ClientRequestError` so that errors like 404 and 422 (input validation) now use the same error format as FastAPI uses for its built-in model validation errors. For parameter errors from endpoints like `GET /v1/pages/{page}/html`, the parameter name is now part of the `loc` field in the error message.
+- Times Square and its worker now send uncaught exceptions to a Slack webhook for better error reporting. The webhook URL is set in the `TS_SLACK_WEBHOOK_URL` environment variable.
+
+### Other changes
+
+- Updated to Python 3.12
+- Updated to Pydantic 2
+- Adopted Ruff for linting and formatting, replacing black, flake8, and isort.
+- Switched to using Annotated for Pydantic models and FastAPI path functions.
+
## 0.9.2 (2023-09-21)
diff --git a/changelog.d/20240306_111612_jsick_DM_43173.md b/changelog.d/20240306_111612_jsick_DM_43173.md
deleted file mode 100644
index 8d3f9f95..00000000
--- a/changelog.d/20240306_111612_jsick_DM_43173.md
+++ /dev/null
@@ -1,6 +0,0 @@
-### Other changes
-
-- Updated to Python 3.12
-- Updated to Pydantic 2
-- Adopted Ruff for linting and formatting, replacing black, flake8, and isort.
-- Switch to using Annotated for Pydantic models and FastAPI path functions.
diff --git a/requirements/dev.txt b/requirements/dev.txt
index 5b5832f0..b0798c57 100644
--- a/requirements/dev.txt
+++ b/requirements/dev.txt
@@ -492,34 +492,34 @@ mdurl==0.1.2 \
# via
# -c requirements/main.txt
# markdown-it-py
-mypy==1.8.0 \
- --hash=sha256:028cf9f2cae89e202d7b6593cd98db6759379f17a319b5faf4f9978d7084cdc6 \
- --hash=sha256:2afecd6354bbfb6e0160f4e4ad9ba6e4e003b767dd80d85516e71f2e955ab50d \
- --hash=sha256:2b5b6c721bd4aabaadead3a5e6fa85c11c6c795e0c81a7215776ef8afc66de02 \
- --hash=sha256:42419861b43e6962a649068a61f4a4839205a3ef525b858377a960b9e2de6e0d \
- --hash=sha256:42c6680d256ab35637ef88891c6bd02514ccb7e1122133ac96055ff458f93fc3 \
- --hash=sha256:485a8942f671120f76afffff70f259e1cd0f0cfe08f81c05d8816d958d4577d3 \
- --hash=sha256:4c886c6cce2d070bd7df4ec4a05a13ee20c0aa60cb587e8d1265b6c03cf91da3 \
- --hash=sha256:4e6d97288757e1ddba10dd9549ac27982e3e74a49d8d0179fc14d4365c7add66 \
- --hash=sha256:4ef4be7baf08a203170f29e89d79064463b7fc7a0908b9d0d5114e8009c3a259 \
- --hash=sha256:51720c776d148bad2372ca21ca29256ed483aa9a4cdefefcef49006dff2a6835 \
- --hash=sha256:52825b01f5c4c1c4eb0db253ec09c7aa17e1a7304d247c48b6f3599ef40db8bd \
- --hash=sha256:538fd81bb5e430cc1381a443971c0475582ff9f434c16cd46d2c66763ce85d9d \
- --hash=sha256:5c1538c38584029352878a0466f03a8ee7547d7bd9f641f57a0f3017a7c905b8 \
- --hash=sha256:6ff8b244d7085a0b425b56d327b480c3b29cafbd2eff27316a004f9a7391ae07 \
- --hash=sha256:7178def594014aa6c35a8ff411cf37d682f428b3b5617ca79029d8ae72f5402b \
- --hash=sha256:720a5ca70e136b675af3af63db533c1c8c9181314d207568bbe79051f122669e \
- --hash=sha256:7f1478736fcebb90f97e40aff11a5f253af890c845ee0c850fe80aa060a267c6 \
- --hash=sha256:855fe27b80375e5c5878492f0729540db47b186509c98dae341254c8f45f42ae \
- --hash=sha256:8963b83d53ee733a6e4196954502b33567ad07dfd74851f32be18eb932fb1cb9 \
- --hash=sha256:9261ed810972061388918c83c3f5cd46079d875026ba97380f3e3978a72f503d \
- --hash=sha256:99b00bc72855812a60d253420d8a2eae839b0afa4938f09f4d2aa9bb4654263a \
- --hash=sha256:ab3c84fa13c04aeeeabb2a7f67a25ef5d77ac9d6486ff33ded762ef353aa5592 \
- --hash=sha256:afe3fe972c645b4632c563d3f3eff1cdca2fa058f730df2b93a35e3b0c538218 \
- --hash=sha256:d19c413b3c07cbecf1f991e2221746b0d2a9410b59cb3f4fb9557f0365a1a817 \
- --hash=sha256:df9824ac11deaf007443e7ed2a4a26bebff98d2bc43c6da21b2b64185da011c4 \
- --hash=sha256:e46f44b54ebddbeedbd3d5b289a893219065ef805d95094d16a0af6630f5d410 \
- --hash=sha256:f5ac9a4eeb1ec0f1ccdc6f326bcdb464de5f80eb07fb38b5ddd7b0de6bc61e55
+mypy==1.9.0 \
+ --hash=sha256:0235391f1c6f6ce487b23b9dbd1327b4ec33bb93934aa986efe8a9563d9349e6 \
+ --hash=sha256:190da1ee69b427d7efa8aa0d5e5ccd67a4fb04038c380237a0d96829cb157913 \
+ --hash=sha256:2418488264eb41f69cc64a69a745fad4a8f86649af4b1041a4c64ee61fc61129 \
+ --hash=sha256:3a3c007ff3ee90f69cf0a15cbcdf0995749569b86b6d2f327af01fd1b8aee9dc \
+ --hash=sha256:3cc5da0127e6a478cddd906068496a97a7618a21ce9b54bde5bf7e539c7af974 \
+ --hash=sha256:48533cdd345c3c2e5ef48ba3b0d3880b257b423e7995dada04248725c6f77374 \
+ --hash=sha256:49c87c15aed320de9b438ae7b00c1ac91cd393c1b854c2ce538e2a72d55df150 \
+ --hash=sha256:4d3dbd346cfec7cb98e6cbb6e0f3c23618af826316188d587d1c1bc34f0ede03 \
+ --hash=sha256:571741dc4194b4f82d344b15e8837e8c5fcc462d66d076748142327626a1b6e9 \
+ --hash=sha256:587ce887f75dd9700252a3abbc9c97bbe165a4a630597845c61279cf32dfbf02 \
+ --hash=sha256:5d741d3fc7c4da608764073089e5f58ef6352bedc223ff58f2f038c2c4698a89 \
+ --hash=sha256:5e6061f44f2313b94f920e91b204ec600982961e07a17e0f6cd83371cb23f5c2 \
+ --hash=sha256:61758fabd58ce4b0720ae1e2fea5cfd4431591d6d590b197775329264f86311d \
+ --hash=sha256:653265f9a2784db65bfca694d1edd23093ce49740b2244cde583aeb134c008f3 \
+ --hash=sha256:68edad3dc7d70f2f17ae4c6c1b9471a56138ca22722487eebacfd1eb5321d612 \
+ --hash=sha256:81a10926e5473c5fc3da8abb04119a1f5811a236dc3a38d92015cb1e6ba4cb9e \
+ --hash=sha256:85ca5fcc24f0b4aeedc1d02f93707bccc04733f21d41c88334c5482219b1ccb3 \
+ --hash=sha256:a260627a570559181a9ea5de61ac6297aa5af202f06fd7ab093ce74e7181e43e \
+ --hash=sha256:aceb1db093b04db5cd390821464504111b8ec3e351eb85afd1433490163d60cd \
+ --hash=sha256:b685154e22e4e9199fc95f298661deea28aaede5ae16ccc8cbb1045e716b3e04 \
+ --hash=sha256:d357423fa57a489e8c47b7c85dfb96698caba13d66e086b412298a1a0ea3b0ed \
+ --hash=sha256:d4d5ddc13421ba3e2e082a6c2d74c2ddb3979c39b582dacd53dd5d9431237185 \
+ --hash=sha256:e49499be624dead83927e70c756970a0bc8240e9f769389cdf5714b0784ca6bf \
+ --hash=sha256:e54396d70be04b34f31d2edf3362c1edd023246c82f1730bbf8768c28db5361b \
+ --hash=sha256:f88566144752999351725ac623471661c9d1cd8caa0134ff98cceeea181789f4 \
+ --hash=sha256:f8a67616990062232ee4c3952f41c779afac41405806042a8126fe96e098419f \
+ --hash=sha256:fe28657de3bfec596bbeef01cb219833ad9d38dd5393fc649f4b366840baefe6
# via
# -r requirements/dev.in
# sqlalchemy
@@ -535,9 +535,9 @@ nodeenv==1.8.0 \
--hash=sha256:d51e0c37e64fbf47d017feac3145cdbb58836d7eee8c6f6d3b6880c5456227d2 \
--hash=sha256:df865724bb3c3adc86b3876fa209771517b0cfe596beff01a92700e0e8be4cec
# via pre-commit
-packaging==23.2 \
- --hash=sha256:048fb0e9405036518eaaf48a55953c750c11e1a1b68e0dd1a9d62ed0c092cfc5 \
- --hash=sha256:8c491190033a9af7e1d931d0b5dacc2ef47509b34dd0de67ed209b5203fc88c7
+packaging==24.0 \
+ --hash=sha256:2ddfb553fdf02fb784c234c7ba6ccc288296ceabec964ad2eae3777778130bc5 \
+ --hash=sha256:eb82c5e3e56209074766e6885bb04b8c38a0c015d0a30036ebe7ece34c9989e9
# via
# -c requirements/main.txt
# pydata-sphinx-theme
@@ -677,17 +677,17 @@ pygments==2.17.2 \
pylatexenc==2.10 \
--hash=sha256:3dd8fd84eb46dc30bee1e23eaab8d8fb5a7f507347b23e5f38ad9675c84f40d3
# via documenteer
-pytest==8.0.2 \
- --hash=sha256:d4051d623a2e0b7e51960ba963193b09ce6daeb9759a451844a21e4ddedfc1bd \
- --hash=sha256:edfaaef32ce5172d5466b5127b42e0d6d35ebbe4453f0e3505d96afd93f6b096
+pytest==8.1.1 \
+ --hash=sha256:2a8386cfc11fa9d2c50ee7b2a57e7d898ef90470a7a34c4b949ff59662bb78b7 \
+ --hash=sha256:ac978141a75948948817d360297b7aae0fcb9d6ff6bc9ec6d514b85d5a65c044
# via
# -r requirements/dev.in
# pytest-asyncio
# pytest-cov
# pytest-mock
-pytest-asyncio==0.23.5 \
- --hash=sha256:3a048872a9c4ba14c3e90cc1aa20cbc2def7d01c7c8db3777ec281ba9c057675 \
- --hash=sha256:4e7093259ba018d58ede7d5315131d21923a60f8a6e9ee266ce1589685c89eac
+pytest-asyncio==0.23.5.post1 \
+ --hash=sha256:30f54d27774e79ac409778889880242b0403d09cabd65b727ce90fe92dd5d80e \
+ --hash=sha256:b9a8806bea78c21276bc34321bbf234ba1b2ea5b30d9f0ce0f2dea45e4685813
# via -r requirements/dev.in
pytest-cov==4.1.0 \
--hash=sha256:3904b13dfbfec47f003b8e77fd5b589cd11904a21ddf1ab38a64f204d6a10ef6 \
@@ -1052,17 +1052,17 @@ tomlkit==0.12.4 \
--hash=sha256:5cd82d48a3dd89dee1f9d64420aa20ae65cfbd00668d6f094d7578a78efbb77b \
--hash=sha256:7ca1cfc12232806517a8515047ba66a19369e71edf2439d0f5824f91032b6cc3
# via documenteer
-types-pyopenssl==24.0.0.20240228 \
- --hash=sha256:a472cf877a873549175e81972f153f44e975302a3cf17381eb5f3d41ccfb75a4 \
- --hash=sha256:cd990717d8aa3743ef0e73e0f462e64b54d90c304249232d48fece4f0f7c3c6a
+types-pyopenssl==24.0.0.20240311 \
+ --hash=sha256:6e8e8bfad34924067333232c93f7fc4b369856d8bea0d5c9d1808cb290ab1972 \
+ --hash=sha256:7bca00cfc4e7ef9c5d2663c6a1c068c35798e59670595439f6296e7ba3d58083
# via types-redis
-types-pyyaml==6.0.12.12 \
- --hash=sha256:334373d392fde0fdf95af5c3f1661885fa10c52167b14593eb856289e1855062 \
- --hash=sha256:c05bc6c158facb0676674b7f11fe3960db4f389718e19e62bd2b84d6205cfd24
+types-pyyaml==6.0.12.20240311 \
+ --hash=sha256:a9e0f0f88dc835739b0c1ca51ee90d04ca2a897a71af79de9aec5f38cb0a5342 \
+ --hash=sha256:b845b06a1c7e54b8e5b4c683043de0d9caf205e7434b3edc678ff2411979b8f6
# via -r requirements/dev.in
-types-redis==4.6.0.20240218 \
- --hash=sha256:5103d7e690e5c74c974a161317b2d59ac2303cf8bef24175b04c2a4c3486cb39 \
- --hash=sha256:dc9c45a068240e33a04302aec5655cf41e80f91eecffccbb2df215b2f6fc375d
+types-redis==4.6.0.20240311 \
+ --hash=sha256:6b9d68a29aba1ee400c823d8e5fe88675282eb69d7211e72fe65dbe54b33daca \
+ --hash=sha256:e049bbdff0e0a1f8e701b64636811291d21bff79bf1e7850850a44055224a85f
# via -r requirements/dev.in
typing-extensions==4.10.0 \
--hash=sha256:69b1a937c3a517342112fb4c6df7e72fc39a38e7891a5730ed4985b5214b5475 \
@@ -1085,9 +1085,9 @@ urllib3==2.2.1 \
# via
# documenteer
# requests
-uvicorn==0.27.1 \
- --hash=sha256:3d9a267296243532db80c83a959a3400502165ade2c1338dea4e67915fd4745a \
- --hash=sha256:5c89da2f3895767472a35556e539fd59f7edbe9b1e9c0e1c99eebeadc61838e4
+uvicorn==0.28.0 \
+ --hash=sha256:6623abbbe6176204a4226e67607b4d52cc60ff62cda0ff177613645cefa2ece1 \
+ --hash=sha256:cab4473b5d1eaeb5a0f6375ac4bc85007ffc75c3cc1768816d9e5d589857b067
# via
# -c requirements/main.txt
# -r requirements/dev.in
diff --git a/requirements/main.txt b/requirements/main.txt
index 36c76a3f..4cdd8e8c 100644
--- a/requirements/main.txt
+++ b/requirements/main.txt
@@ -19,10 +19,6 @@ arq==0.25.0 \
--hash=sha256:d176ebadfba920c039dc578814d19b7814d67fa15f82fdccccaedb4330d65dae \
--hash=sha256:db072d0f39c0bc06b436db67ae1f315c81abc1527563b828955670531815290b
# via safir
-async-timeout==4.0.3 \
- --hash=sha256:4640d96be84d82d02ed59ea2b7105a0f7b33abe8703703cd0ab0bf87c427522f \
- --hash=sha256:7405140ff1230c310e51dc27b3145b9092d659ce68ff733fb0cefe3ee42be028
- # via redis
asyncpg==0.29.0 \
--hash=sha256:0009a300cae37b8c525e5b449233d59cd9868fd35431abc470a3e364d2b85cb9 \
--hash=sha256:000c996c53c04770798053e1730d34e30cb645ad95a63265aec82da9093d88e7 \
@@ -598,9 +594,9 @@ nbformat==5.9.2 \
# -r requirements/main.in
# nbclient
# nbconvert
-packaging==23.2 \
- --hash=sha256:048fb0e9405036518eaaf48a55953c750c11e1a1b68e0dd1a9d62ed0c092cfc5 \
- --hash=sha256:8c491190033a9af7e1d931d0b5dacc2ef47509b34dd0de67ed209b5203fc88c7
+packaging==24.0 \
+ --hash=sha256:2ddfb553fdf02fb784c234c7ba6ccc288296ceabec964ad2eae3777778130bc5 \
+ --hash=sha256:eb82c5e3e56209074766e6885bb04b8c38a0c015d0a30036ebe7ece34c9989e9
# via
# gunicorn
# nbconvert
@@ -879,9 +875,9 @@ pyzmq==25.1.2 \
--hash=sha256:faf79a302f834d9e8304fafdc11d0d042266667ac45209afa57e5efc998e3872 \
--hash=sha256:fc31baa0c32a2ca660784d5af3b9487e13b61b3032cb01a115fce6588e1bed30
# via jupyter-client
-redis[hiredis]==5.0.2 \
- --hash=sha256:3f82cc80d350e93042c8e6e7a5d0596e4dd68715babffba79492733e1f367037 \
- --hash=sha256:4caa8e1fcb6f3c0ef28dba99535101d80934b7d4cd541bbb47f4a3826ee472d1
+redis[hiredis]==5.0.3 \
+ --hash=sha256:4973bae7444c0fbed64a06b87446f79361cb7e4ec1538c022d696ed7a5015580 \
+ --hash=sha256:5da9b8fe9e1254293756c16c008e8620b3d15fcc6dde6babde9541850e72a32d
# via
# arq
# safir
@@ -1119,9 +1115,9 @@ uritemplate==4.1.1 \
--hash=sha256:4346edfc5c3b79f694bccd6d6099a322bbeb628dbf2cd86eea55a456ce5124f0 \
--hash=sha256:830c08b8d99bdd312ea4ead05994a38e8936266f84b9a7878232db50b044e02e
# via gidgethub
-uvicorn[standard]==0.27.1 \
- --hash=sha256:3d9a267296243532db80c83a959a3400502165ade2c1338dea4e67915fd4745a \
- --hash=sha256:5c89da2f3895767472a35556e539fd59f7edbe9b1e9c0e1c99eebeadc61838e4
+uvicorn[standard]==0.28.0 \
+ --hash=sha256:6623abbbe6176204a4226e67607b4d52cc60ff62cda0ff177613645cefa2ece1 \
+ --hash=sha256:cab4473b5d1eaeb5a0f6375ac4bc85007ffc75c3cc1768816d9e5d589857b067
# via -r requirements/main.in
uvloop==0.19.0 \
--hash=sha256:0246f4fd1bf2bf702e06b0d45ee91677ee5c31242f39aab4ea6fe0c51aedd0fd \
diff --git a/src/timessquare/__init__.py b/src/timessquare/__init__.py
index 51188df2..5abea412 100644
--- a/src/timessquare/__init__.py
+++ b/src/timessquare/__init__.py
@@ -8,7 +8,7 @@
"""The application version string (PEP 440 / SemVer compatible)."""
try:
- __version__ = version(__name__)
+ __version__ = version("times-square")
except PackageNotFoundError:
# package is not installed
__version__ = "0.0.0"
diff --git a/src/timessquare/config.py b/src/timessquare/config.py
index 0b52ede6..3a63b786 100644
--- a/src/timessquare/config.py
+++ b/src/timessquare/config.py
@@ -218,6 +218,16 @@ class Config(BaseSettings):
),
]
+ slack_webhook_url: Annotated[
+ HttpUrl | None,
+ Field(
+ alias="TS_SLACK_WEBHOOK_URL",
+ description=(
+ "Webhook URL for sending error messages to a Slack channel."
+ ),
+ ),
+ ] = None
+
@field_validator("path_prefix")
@classmethod
def validate_path_prefix(cls, v: str) -> str:
diff --git a/src/timessquare/domain/page.py b/src/timessquare/domain/page.py
index 3c8df26c..126c5d09 100644
--- a/src/timessquare/domain/page.py
+++ b/src/timessquare/domain/page.py
@@ -342,7 +342,8 @@ def read_ipynb(source: str) -> nbformat.NotebookNode:
try:
return nbformat.reads(source, as_version=NB_VERSION)
except Exception as e:
- raise PageNotebookFormatError(str(e)) from e
+ message = f"The notebook is not a valid ipynb file.\n\n{e}"
+ raise PageNotebookFormatError(message) from e
@staticmethod
def write_ipynb(notebook: nbformat.NotebookNode) -> str:
@@ -399,9 +400,9 @@ def validate_parameter_name(name: str) -> None:
They also cannot be Python keywords.
"""
if parameter_name_pattern.match(name) is None:
- raise ParameterNameValidationError(name)
+ raise ParameterNameValidationError.for_param(name)
if keyword.iskeyword(name):
- raise ParameterNameValidationError(name)
+ raise ParameterNameValidationError.for_param(name)
def resolve_and_validate_values(
self, requested_values: Mapping[str, Any]
@@ -429,14 +430,16 @@ def resolve_and_validate_values(
try:
cast_values[name] = self.parameters[name].cast_value(value)
except PageParameterValueCastingError as e:
- raise PageParameterError(
+ raise PageParameterError.for_param(
name, value, self.parameters[name]
) from e
# Ensure each parameter's value is valid
for name, value in cast_values.items():
if not self.parameters[name].validate(value):
- raise PageParameterError(name, value, self.parameters[name])
+ raise PageParameterError.for_param(
+ name, value, self.parameters[name]
+ )
return cast_values
@@ -568,14 +571,17 @@ def create_and_validate(
try:
Draft202012Validator.check_schema(json_schema)
except jsonschema.exceptions.SchemaError as e:
- raise ParameterSchemaError(name, str(e)) from e
+ message = f"The schema for the {name} parameter is invalid.\n\n{e}"
+ raise ParameterSchemaError.for_param(name, message) from e
if "default" not in json_schema:
- raise ParameterDefaultMissingError(name)
+ raise ParameterDefaultMissingError.for_param(name)
instance = cls.create(json_schema)
if not instance.validate(json_schema["default"]):
- raise ParameterDefaultInvalidError(name, json_schema["default"])
+ raise ParameterDefaultInvalidError.for_param(
+ name, json_schema["default"]
+ )
return instance
@@ -629,13 +635,17 @@ def cast_value(self, v: Any) -> Any: # noqa: C901 PLR0912
elif v.lower() == "false":
return False
else:
- raise PageParameterValueCastingError(v, schema_type)
+ raise PageParameterValueCastingError.for_value(
+ v, schema_type
+ )
else:
return v
else:
- raise PageParameterValueCastingError(v, schema_type)
+ raise PageParameterValueCastingError.for_value(v, schema_type)
except ValueError as e:
- raise PageParameterValueCastingError(v, schema_type) from e
+ raise PageParameterValueCastingError.for_value(
+ v, schema_type
+ ) from e
@dataclass
diff --git a/src/timessquare/exceptions.py b/src/timessquare/exceptions.py
index 1c5fcc0a..ccf6bfc3 100644
--- a/src/timessquare/exceptions.py
+++ b/src/timessquare/exceptions.py
@@ -2,80 +2,71 @@
from __future__ import annotations
-from typing import TYPE_CHECKING, Any, ClassVar
+from typing import TYPE_CHECKING, Any, Self
from fastapi import status
+from safir.fastapi import ClientRequestError
+from safir.models import ErrorLocation
if TYPE_CHECKING:
from .domain.page import PageParameterSchema
-class TimesSquareError(Exception):
- """Root error for Times Square services."""
+class TimesSquareClientError(ClientRequestError):
+ """Error related to a request from an API client."""
- error: ClassVar[str] = "times_square_error"
- """Used as the ``type`` field of the error message.
- Should be overridden by any subclass.
- """
-
- status_code: ClassVar[int] = status.HTTP_500_INTERNAL_SERVER_ERROR
- """HTTP status code for this type of validation error."""
-
- def to_dict(self) -> dict[str, list[str] | str]:
- """Convert the exception to a dictionary suitable for the exception.
-
- The return value is intended to be passed as the ``detail`` parameter
- to a `fastapi.HTTPException`.
- """
- return {
- "msg": str(self),
- "type": self.error,
- }
-
-
-class PageNotFoundError(TimesSquareError):
+class PageNotFoundError(TimesSquareClientError):
"""Error related to a page not being found."""
error = "page_not_found"
+ status_code = status.HTTP_404_NOT_FOUND
- status_code = 404
-
- def __init__(self, name: str) -> None:
- message = f"Page {name} not found."
- super().__init__(message)
+ @classmethod
+ def for_page_id(
+ cls,
+ page_id: str,
+ location: ErrorLocation | None = None,
+ field_path: list[str] | None = None,
+ ) -> Self:
+ """Create an exception with a message based on requested page ID."""
+ message = f"Page {page_id} not found."
+ return cls(message, location=location, field_path=field_path)
-class PageNotebookFormatError(TimesSquareError):
+class PageNotebookFormatError(TimesSquareClientError):
"""Error related to parsing an ipynb file."""
error = "ipynb_invalid"
-
status_code = status.HTTP_422_UNPROCESSABLE_ENTITY
- def __init__(self, message: str) -> None:
- message = f"The notebook is not a valid ipynb file.\n\n{message}"
- super().__init__(message)
-
-class PageParameterError(TimesSquareError):
+class PageParameterError(TimesSquareClientError):
"""Error related to a page parameter's value."""
error = "parameter_value_invalid"
-
status_code = status.HTTP_422_UNPROCESSABLE_ENTITY
- def __init__(
- self, name: str, value: Any, schema: PageParameterSchema
- ) -> None:
+ @classmethod
+ def for_param(
+ cls,
+ name: str,
+ value: Any,
+ schema: PageParameterSchema,
+ location: ErrorLocation | None = None,
+ field_path: list[str] | None = None,
+ ) -> Self:
+ """Create an exception with a message based on the parameter name,
+ value, and schema.
+ """
message = (
f"Value {value!r} for the {name} parameter is invalid. The "
f"schema is:\n\n{schema!s}"
)
- super().__init__(message)
+ return cls(message, location=location, field_path=field_path)
-class PageParameterValueCastingError(TimesSquareError):
+class PageParameterValueCastingError(TimesSquareClientError):
"""Error related to casting a parameter's value.
Usually this error is converted into a `PageParameterError` since the
@@ -83,48 +74,42 @@ class PageParameterValueCastingError(TimesSquareError):
"""
error = "parameter_value_casting_error"
-
status_code = status.HTTP_422_UNPROCESSABLE_ENTITY
- def __init__(self, value: Any, schema_type: Any) -> None:
+ @classmethod
+ def for_value(
+ cls,
+ value: Any,
+ schema_type: Any,
+ location: ErrorLocation | None = None,
+ field_path: list[str] | None = None,
+ ) -> Self:
+ """Create an exception with a message based on the value and schema
+ type.
+ """
message = f"Value {value!r} cannot be cast to type {schema_type}"
- super().__init__(message)
-
-
-class ParameterSchemaValidationError(TimesSquareError):
- """Error related to a parameter.
+ return cls(message, location=location, field_path=field_path)
- There is a global handler for this exception and all exceptions derived
- from it that returns an HTTP 422 status code with a body that's consistent
- with the error messages generated internally by FastAPI. It should be
- used for input and parameter validation errors that cannot be caught by
- FastAPI for whatever reason.
- Parameters
- ----------
- message : `str`
- The error message (used as the ``msg`` key).
- parameter_name : `str`
- The name of the invalid parameter.
- """
+class ParameterSchemaValidationError(TimesSquareClientError):
+ """Error related to a parameter."""
error = "parameter_validation_failed"
-
status_code = status.HTTP_422_UNPROCESSABLE_ENTITY
- def __init__(self, message: str, parameter_name: str) -> None:
- super().__init__(message)
- self.parameter_name = parameter_name
-
- def to_dict(self) -> dict[str, list[str] | str]:
- """Convert the exception to a dictionary suitable for the exception.
+ parameter: str
+ """The name of the parameter that caused the error."""
- The return value is intended to be passed as the ``detail`` parameter
- to a `fastapi.HTTPException`.
- """
- details = super().to_dict()
- details["name"] = self.parameter_name
- return details
+ def __init__(
+ self,
+ parameter: str,
+ message: str,
+ location: ErrorLocation | None = None,
+ field_path: list[str] | None = None,
+ ) -> None:
+ """Create an exception with a message based on the parameter name."""
+ super().__init__(message, location=location, field_path=field_path)
+ self.parameter = parameter
class ParameterNameValidationError(ParameterSchemaValidationError):
@@ -132,9 +117,15 @@ class ParameterNameValidationError(ParameterSchemaValidationError):
error = "invalid_parameter_name"
- def __init__(self, name: str) -> None:
+ @classmethod
+ def for_param(
+ cls,
+ name: str,
+ location: ErrorLocation | None = None,
+ field_path: list[str] | None = None,
+ ) -> Self:
message = f"Parameter name {name} is not valid."
- super().__init__(message, name)
+ return cls(name, message, location=location, field_path=field_path)
class ParameterSchemaError(ParameterSchemaValidationError):
@@ -142,8 +133,15 @@ class ParameterSchemaError(ParameterSchemaValidationError):
error = "invalid_parameter_schema"
- def __init__(self, name: str, message: str) -> None:
- super().__init__(message, name)
+ @classmethod
+ def for_param(
+ cls,
+ name: str,
+ message: str,
+ location: ErrorLocation | None = None,
+ field_path: list[str] | None = None,
+ ) -> Self:
+ return cls(name, message, location=location, field_path=field_path)
class ParameterDefaultMissingError(ParameterSchemaValidationError):
@@ -151,9 +149,15 @@ class ParameterDefaultMissingError(ParameterSchemaValidationError):
error = "parameter_default_missing"
- def __init__(self, name: str) -> None:
+ @classmethod
+ def for_param(
+ cls,
+ name: str,
+ location: ErrorLocation | None = None,
+ field_path: list[str] | None = None,
+ ) -> Self:
message = f"Parameter {name} is missing a default."
- super().__init__(message, name)
+ return cls(name, message, location=location, field_path=field_path)
class ParameterDefaultInvalidError(ParameterSchemaValidationError):
@@ -163,6 +167,13 @@ class ParameterDefaultInvalidError(ParameterSchemaValidationError):
error = "parameter_default_invalid"
- def __init__(self, name: str, default: Any) -> None:
+ @classmethod
+ def for_param(
+ cls,
+ name: str,
+ default: Any,
+ location: ErrorLocation | None = None,
+ field_path: list[str] | None = None,
+ ) -> Self:
message = f"Parameter {name}'s default is invalid: {default!s}."
- super().__init__(message, name)
+ return cls(name, message, location=location, field_path=field_path)
diff --git a/src/timessquare/handlers/v1/endpoints.py b/src/timessquare/handlers/v1/endpoints.py
index 6f7a10c6..e78d5a19 100644
--- a/src/timessquare/handlers/v1/endpoints.py
+++ b/src/timessquare/handlers/v1/endpoints.py
@@ -6,12 +6,19 @@
from fastapi.responses import HTMLResponse, PlainTextResponse
from pydantic import AnyHttpUrl
from safir.metadata import get_metadata
+from safir.models import ErrorLocation, ErrorModel
+from safir.slack.webhook import SlackRouteErrorHandler
from timessquare.config import config
from timessquare.dependencies.requestcontext import (
RequestContext,
context_dependency,
)
+from timessquare.exceptions import (
+ PageNotebookFormatError,
+ PageNotFoundError,
+ ParameterSchemaValidationError,
+)
from ..apitags import ApiTags
from .models import (
@@ -26,7 +33,7 @@
__all__ = ["v1_router"]
-v1_router = APIRouter()
+v1_router = APIRouter(route_class=SlackRouteErrorHandler)
"""FastAPI router for all v1 handlers."""
display_path_parameter = Path(
@@ -99,6 +106,9 @@ async def get_index(
summary="Page metadata",
name="get_page",
tags=[ApiTags.pages],
+ responses={
+ 404: {"description": "Page not found", "model": ErrorModel},
+ },
)
async def get_page(
context: Annotated[RequestContext, Depends(context_dependency)],
@@ -109,7 +119,12 @@ async def get_page(
"""
page_service = context.page_service
async with context.session.begin():
- page_domain = await page_service.get_page(page)
+ try:
+ page_domain = await page_service.get_page(page)
+ except PageNotFoundError as e:
+ e.location = ErrorLocation.path
+ e.field_path = ["page"]
+ raise
context.response.headers["location"] = str(
context.request.url_for("get_page", page=page_domain.name)
@@ -143,6 +158,12 @@ async def get_pages(
summary="Create a new page",
status_code=201,
tags=[ApiTags.pages],
+ responses={
+ 422: {
+ "description": "Invalid ipynb",
+ "model": ErrorModel,
+ },
+ },
)
async def post_page(
request_data: PostPageRequest,
@@ -214,15 +235,22 @@ async def post_page(
authors = [a.to_domain() for a in request_data.authors]
async with context.session.begin():
- page_exec = await page_service.create_page_with_notebook_from_upload(
- title=request_data.title,
- ipynb=request_data.ipynb,
- uploader_username=username,
- authors=authors,
- tags=request_data.tags,
- description=request_data.description,
- cache_ttl=request_data.cache_ttl,
- )
+ try:
+ page_exec = (
+ await page_service.create_page_with_notebook_from_upload(
+ title=request_data.title,
+ ipynb=request_data.ipynb,
+ uploader_username=username,
+ authors=authors,
+ tags=request_data.tags,
+ description=request_data.description,
+ cache_ttl=request_data.cache_ttl,
+ )
+ )
+ except PageNotebookFormatError as e:
+ e.location = ErrorLocation.body
+ e.field_path = ["ipynb"]
+ raise
page = await page_service.get_page(page_exec.name)
context.response.headers["location"] = str(
@@ -236,6 +264,7 @@ async def post_page(
summary="Get the source parameterized notebook (ipynb)",
name="get_page_source",
tags=[ApiTags.pages],
+ responses={404: {"description": "Page not found", "model": ErrorModel}},
)
async def get_page_source(
page: Annotated[str, page_path_parameter],
@@ -246,7 +275,12 @@ async def get_page_source(
"""
page_service = context.page_service
async with context.session.begin():
- page_domain = await page_service.get_page(page)
+ try:
+ page_domain = await page_service.get_page(page)
+ except PageNotFoundError as e:
+ e.location = ErrorLocation.path
+ e.field_path = ["page"]
+ raise
response_headers = {
"location": str(
@@ -266,6 +300,10 @@ async def get_page_source(
summary="Get the unexecuted notebook source with rendered parameters",
name="get_rendered_notebook",
tags=[ApiTags.pages],
+ responses={
+ 404: {"description": "Page not found", "model": ErrorModel},
+ 422: {"description": "Invalid parameter", "model": ErrorModel},
+ },
)
async def get_rendered_notebook(
page: Annotated[str, page_path_parameter],
@@ -277,9 +315,18 @@ async def get_rendered_notebook(
page_service = context.page_service
parameters = context.request.query_params
async with context.session.begin():
- rendered_notebook = await page_service.render_page_template(
- page, parameters
- )
+ try:
+ rendered_notebook = await page_service.render_page_template(
+ page, parameters
+ )
+ except PageNotFoundError as e:
+ e.location = ErrorLocation.path
+ e.field_path = ["page"]
+ raise
+ except ParameterSchemaValidationError as e:
+ e.location = ErrorLocation.query
+ e.field_path = [e.parameter]
+ raise
return PlainTextResponse(rendered_notebook, media_type="application/json")
@@ -288,6 +335,10 @@ async def get_rendered_notebook(
summary="Get the HTML page of an computed notebook",
name="get_page_html",
tags=[ApiTags.pages],
+ responses={
+ 404: {"description": "Page not found", "model": ErrorModel},
+ 422: {"description": "Invalid parameter", "model": ErrorModel},
+ },
)
async def get_page_html(
page: Annotated[str, page_path_parameter],
@@ -296,9 +347,18 @@ async def get_page_html(
"""Get the rendered HTML of a notebook."""
page_service = context.page_service
async with context.session.begin():
- html = await page_service.get_html(
- name=page, query_params=context.request.query_params
- )
+ try:
+ html = await page_service.get_html(
+ name=page, query_params=context.request.query_params
+ )
+ except PageNotFoundError as e:
+ e.location = ErrorLocation.path
+ e.field_path = ["page"]
+ raise
+ except ParameterSchemaValidationError as e:
+ e.location = ErrorLocation.query
+ e.field_path = [e.parameter]
+ raise
if not html:
raise HTTPException(
@@ -314,6 +374,10 @@ async def get_page_html(
name="get_page_html_status",
response_model=HtmlStatus,
tags=[ApiTags.pages],
+ responses={
+ 404: {"description": "Page not found", "model": ErrorModel},
+ 422: {"description": "Invalid parameter", "model": ErrorModel},
+ },
)
async def get_page_html_status(
page: Annotated[str, page_path_parameter],
@@ -321,9 +385,18 @@ async def get_page_html_status(
) -> HtmlStatus:
page_service = context.page_service
async with context.session.begin():
- html = await page_service.get_html(
- name=page, query_params=context.request.query_params
- )
+ try:
+ html = await page_service.get_html(
+ name=page, query_params=context.request.query_params
+ )
+ except PageNotFoundError as e:
+ e.location = ErrorLocation.path
+ e.field_path = ["page"]
+ raise
+ except ParameterSchemaValidationError as e:
+ e.location = ErrorLocation.query
+ e.field_path = [e.parameter]
+ raise
return HtmlStatus.from_html(html=html, request=context.request)
@@ -357,6 +430,7 @@ async def get_github_tree(
summary="Metadata for GitHub-backed page",
name="get_github_page",
tags=[ApiTags.github],
+ responses={404: {"description": "Page not found", "model": ErrorModel}},
)
async def get_github_page(
display_path: Annotated[str, display_path_parameter],
@@ -370,7 +444,14 @@ async def get_github_page(
"""
page_service = context.page_service
async with context.session.begin():
- page_domain = await page_service.get_github_backed_page(display_path)
+ try:
+ page_domain = await page_service.get_github_backed_page(
+ display_path
+ )
+ except PageNotFoundError as e:
+ e.location = ErrorLocation.path
+ e.field_path = ["display_path"]
+ raise
context.response.headers["location"] = str(
context.request.url_for("get_page", page=page_domain.name)
@@ -435,6 +516,7 @@ async def get_github_pr_tree(
summary="Metadata for page in a pull request",
name="get_github_pr_page",
tags=[ApiTags.pr],
+ responses={404: {"description": "Page not found", "model": ErrorModel}},
)
async def get_github_pr_page(
owner: Annotated[str, github_owner_parameter],
@@ -446,12 +528,17 @@ async def get_github_pr_page(
"""Get the metadata for a pull request preview of a GitHub-backed page."""
page_service = context.page_service
async with context.session.begin():
- page_domain = await page_service.get_github_pr_page(
- owner=owner,
- repo=repo,
- commit=commit,
- path=path,
- )
+ try:
+ page_domain = await page_service.get_github_pr_page(
+ owner=owner,
+ repo=repo,
+ commit=commit,
+ path=path,
+ )
+ except PageNotFoundError as e:
+ e.location = ErrorLocation.path
+ e.field_path = ["page"]
+ raise
context.response.headers["location"] = str(
context.request.url_for("get_page", page=page_domain.name)
diff --git a/src/timessquare/main.py b/src/timessquare/main.py
index fa63a3af..b2dc99ba 100644
--- a/src/timessquare/main.py
+++ b/src/timessquare/main.py
@@ -14,35 +14,31 @@
from contextlib import asynccontextmanager
from importlib.metadata import version
from pathlib import Path
-from typing import TYPE_CHECKING
from fastapi import FastAPI
from fastapi.openapi.utils import get_openapi
-from fastapi.responses import JSONResponse
from safir.dependencies.arq import arq_dependency
from safir.dependencies.db_session import db_session_dependency
from safir.dependencies.http_client import http_client_dependency
+from safir.fastapi import ClientRequestError, client_request_error_handler
from safir.logging import configure_logging, configure_uvicorn_logging
from safir.middleware.x_forwarded import XForwardedMiddleware
+from safir.slack.webhook import SlackRouteErrorHandler
from structlog import get_logger
from .config import config
from .dependencies.redis import redis_dependency
-from .exceptions import TimesSquareError
from .handlers.external import external_router
from .handlers.internal import internal_router
from .handlers.v1 import v1_router
-if TYPE_CHECKING:
- from fastapi import Request
-
__all__ = ["app", "config"]
@asynccontextmanager
async def lifespan(app: FastAPI) -> AsyncIterator:
"""Context manager for the application lifespan."""
- logger = get_logger("ook")
+ logger = get_logger(__name__)
logger.debug("Times Square is starting up.")
await db_session_dependency.initialize(
@@ -75,6 +71,8 @@ async def lifespan(app: FastAPI) -> AsyncIterator:
)
configure_uvicorn_logging(config.log_level)
+logger = get_logger(__name__)
+
app = FastAPI(
title="Times Square",
description=Path(__file__).parent.joinpath("description.md").read_text(),
@@ -90,20 +88,17 @@ async def lifespan(app: FastAPI) -> AsyncIterator:
# Add middleware
app.add_middleware(XForwardedMiddleware)
+if config.slack_webhook_url:
+ SlackRouteErrorHandler.initialize(
+ str(config.slack_webhook_url), "Times Square", logger
+ )
+
# Add routers
app.include_router(internal_router)
app.include_router(external_router, prefix=f"{config.path_prefix}")
app.include_router(v1_router, prefix=f"{config.path_prefix}/v1")
-
-@app.exception_handler(TimesSquareError)
-async def ts_exception_handler(
- request: Request, exc: TimesSquareError
-) -> JSONResponse:
- """Handle Times Square errors."""
- return JSONResponse(
- status_code=exc.status_code, content={"detail": [exc.to_dict()]}
- )
+app.exception_handler(ClientRequestError)(client_request_error_handler)
def create_openapi() -> str:
diff --git a/src/timessquare/services/page.py b/src/timessquare/services/page.py
index 8f90e9f7..0319c216 100644
--- a/src/timessquare/services/page.py
+++ b/src/timessquare/services/page.py
@@ -121,14 +121,14 @@ async def get_page(self, name: str) -> PageModel:
"""Get the page from the data store, given its name."""
page = await self._page_store.get(name)
if page is None:
- raise PageNotFoundError(name)
+ raise PageNotFoundError.for_page_id(name)
return page
async def get_github_backed_page(self, display_path: str) -> PageModel:
"""Get the page based on its display path."""
page = await self._page_store.get_github_backed_page(display_path)
if page is None:
- raise PageNotFoundError(display_path)
+ raise PageNotFoundError.for_page_id(display_path)
return page
async def get_github_pr_page(
@@ -169,7 +169,7 @@ async def get_github_pr_page(
display_path, commit=commit
)
if page is None:
- raise PageNotFoundError(display_path)
+ raise PageNotFoundError.for_page_id(display_path)
return page
async def get_page_summaries(self) -> list[PageSummaryModel]:
diff --git a/src/timessquare/worker/functions/create_check_run.py b/src/timessquare/worker/functions/create_check_run.py
index e6f2a7fa..5f4f79d9 100644
--- a/src/timessquare/worker/functions/create_check_run.py
+++ b/src/timessquare/worker/functions/create_check_run.py
@@ -6,6 +6,7 @@
from safir.dependencies.db_session import db_session_dependency
from safir.github.webhooks import GitHubCheckSuiteEventModel
+from safir.slack.blockkit import SlackCodeBlock, SlackMessage, SlackTextField
from timessquare.worker.servicefactory import create_github_repo_service
@@ -23,13 +24,41 @@ async def create_check_run(
)
logger.info("Running create_check_run", payload=payload.model_dump())
- async for db_session in db_session_dependency():
- github_repo_service = await create_github_repo_service(
- http_client=ctx["http_client"],
- logger=logger,
- installation_id=payload.installation.id,
- db_session=db_session,
- )
- async with db_session.begin():
- await github_repo_service.initiate_check_runs(payload=payload)
+ try:
+ async for db_session in db_session_dependency():
+ github_repo_service = await create_github_repo_service(
+ http_client=ctx["http_client"],
+ logger=logger,
+ installation_id=payload.installation.id,
+ db_session=db_session,
+ )
+ async with db_session.begin():
+ await github_repo_service.initiate_check_runs(payload=payload)
+ except Exception as e:
+ if "slack" in ctx:
+ await ctx["slack"].post(
+ SlackMessage(
+ message="Times Square worker exception.",
+ fields=[
+ SlackTextField(
+ heading="Task", text="create_check_run"
+ ),
+ SlackTextField(
+ heading="Repository",
+ text=(
+ "https://github.com/"
+ f"{payload.repository.owner.login}/"
+ f"{payload.repository.name}"
+ ),
+ ),
+ ],
+ blocks=[
+ SlackCodeBlock(
+ heading="Exception",
+ code=str(e),
+ )
+ ],
+ )
+ )
+ raise
return "done"
diff --git a/src/timessquare/worker/functions/create_rerequested_check_run.py b/src/timessquare/worker/functions/create_rerequested_check_run.py
index 1d89c526..72dec103 100644
--- a/src/timessquare/worker/functions/create_rerequested_check_run.py
+++ b/src/timessquare/worker/functions/create_rerequested_check_run.py
@@ -6,6 +6,7 @@
from safir.dependencies.db_session import db_session_dependency
from safir.github.webhooks import GitHubCheckRunEventModel
+from safir.slack.blockkit import SlackCodeBlock, SlackMessage, SlackTextField
from timessquare.worker.servicefactory import create_github_repo_service
@@ -25,15 +26,44 @@ async def create_rerequested_check_run(
"Running create_rerequested_check_run", payload=payload.model_dump()
)
- async for db_session in db_session_dependency():
- github_repo_service = await create_github_repo_service(
- http_client=ctx["http_client"],
- logger=logger,
- installation_id=payload.installation.id,
- db_session=db_session,
- )
- async with db_session.begin():
- await github_repo_service.create_rerequested_check_run(
- payload=payload
+ try:
+ async for db_session in db_session_dependency():
+ github_repo_service = await create_github_repo_service(
+ http_client=ctx["http_client"],
+ logger=logger,
+ installation_id=payload.installation.id,
+ db_session=db_session,
)
+ async with db_session.begin():
+ await github_repo_service.create_rerequested_check_run(
+ payload=payload
+ )
+ except Exception as e:
+ if "slack" in ctx:
+ await ctx["slack"].post(
+ SlackMessage(
+ message="Times Square worker exception.",
+ fields=[
+ SlackTextField(
+ heading="Task", text="create_rerequested_check_run"
+ ),
+ SlackTextField(
+ heading="Repository",
+ text=(
+ "https://github.com/"
+ f"{payload.repository.owner.login}/"
+ f"{payload.repository.name}"
+ ),
+ ),
+ ],
+ blocks=[
+ SlackCodeBlock(
+ heading="Exception",
+ code=str(e),
+ )
+ ],
+ )
+ )
+ raise
+
return "done"
diff --git a/src/timessquare/worker/functions/pull_request_sync.py b/src/timessquare/worker/functions/pull_request_sync.py
index 0e622b91..44e6471b 100644
--- a/src/timessquare/worker/functions/pull_request_sync.py
+++ b/src/timessquare/worker/functions/pull_request_sync.py
@@ -6,6 +6,7 @@
from safir.dependencies.db_session import db_session_dependency
from safir.github.webhooks import GitHubPullRequestEventModel
+from safir.slack.blockkit import SlackCodeBlock, SlackMessage, SlackTextField
from timessquare.worker.servicefactory import create_github_repo_service
@@ -28,15 +29,43 @@ async def pull_request_sync(
)
logger.info("Running pull_request_sync")
- async for db_session in db_session_dependency():
- github_repo_service = await create_github_repo_service(
- http_client=ctx["http_client"],
- logger=logger,
- installation_id=payload.installation.id,
- db_session=db_session,
- )
- async with db_session.begin():
- await github_repo_service.check_pull_request(
- pr_payload=payload.pull_request
+ try:
+ async for db_session in db_session_dependency():
+ github_repo_service = await create_github_repo_service(
+ http_client=ctx["http_client"],
+ logger=logger,
+ installation_id=payload.installation.id,
+ db_session=db_session,
)
+ async with db_session.begin():
+ await github_repo_service.check_pull_request(
+ pr_payload=payload.pull_request
+ )
+ except Exception as e:
+ if "slack" in ctx:
+ await ctx["slack"].post(
+ SlackMessage(
+ message="Times Square worker exception.",
+ fields=[
+ SlackTextField(
+ heading="Task", text="pull_request_sync"
+ ),
+ SlackTextField(
+ heading="Repository",
+ text=(
+ "https://github.com/"
+ f"{payload.repository.owner.login}/"
+ f"{payload.repository.name}"
+ ),
+ ),
+ ],
+ blocks=[
+ SlackCodeBlock(
+ heading="Exception",
+ code=str(e),
+ )
+ ],
+ )
+ )
+ raise
return "FIXME"
diff --git a/src/timessquare/worker/functions/repo_added.py b/src/timessquare/worker/functions/repo_added.py
index 68f89926..be3433cd 100644
--- a/src/timessquare/worker/functions/repo_added.py
+++ b/src/timessquare/worker/functions/repo_added.py
@@ -10,6 +10,7 @@
GitHubAppInstallationEventRepoModel,
GitHubAppInstallationRepositoriesEventModel,
)
+from safir.slack.blockkit import SlackCodeBlock, SlackMessage, SlackTextField
from timessquare.worker.servicefactory import create_github_repo_service
@@ -32,16 +33,42 @@ async def repo_added(
)
logger.info("Running repo_added")
- async for db_session in db_session_dependency():
- github_repo_service = await create_github_repo_service(
- http_client=ctx["http_client"],
- logger=logger,
- installation_id=payload.installation.id,
- db_session=db_session,
- )
- async with db_session.begin():
- await github_repo_service.sync_from_repo_installation(
- owner=repo.owner_name,
- repo_name=repo.name,
+ try:
+ async for db_session in db_session_dependency():
+ github_repo_service = await create_github_repo_service(
+ http_client=ctx["http_client"],
+ logger=logger,
+ installation_id=payload.installation.id,
+ db_session=db_session,
)
+ async with db_session.begin():
+ await github_repo_service.sync_from_repo_installation(
+ owner=repo.owner_name,
+ repo_name=repo.name,
+ )
+ except Exception as e:
+ if "slack" in ctx:
+ await ctx["slack"].post(
+ SlackMessage(
+ message="Times Square worker exception.",
+ fields=[
+ SlackTextField(heading="Task", text="repo_added"),
+ SlackTextField(
+ heading="Repository",
+ text=(
+ f"https://github.com/{repo.owner_name}/"
+ f"{repo.name}"
+ ),
+ ),
+ ],
+ blocks=[
+ SlackCodeBlock(
+ heading="Exception",
+ code=str(e),
+ )
+ ],
+ )
+ )
+ raise
+
return "FIXME"
diff --git a/src/timessquare/worker/functions/repo_push.py b/src/timessquare/worker/functions/repo_push.py
index 853586d5..bf19ce5e 100644
--- a/src/timessquare/worker/functions/repo_push.py
+++ b/src/timessquare/worker/functions/repo_push.py
@@ -6,6 +6,7 @@
from safir.dependencies.db_session import db_session_dependency
from safir.github.webhooks import GitHubPushEventModel
+from safir.slack.blockkit import SlackCodeBlock, SlackMessage, SlackTextField
from timessquare.worker.servicefactory import create_github_repo_service
@@ -23,13 +24,39 @@ async def repo_push(
)
logger.info("Running repo_push")
- async for db_session in db_session_dependency():
- github_repo_service = await create_github_repo_service(
- http_client=ctx["http_client"],
- logger=logger,
- installation_id=payload.installation.id,
- db_session=db_session,
- )
- async with db_session.begin():
- await github_repo_service.sync_from_push(payload)
+ try:
+ async for db_session in db_session_dependency():
+ github_repo_service = await create_github_repo_service(
+ http_client=ctx["http_client"],
+ logger=logger,
+ installation_id=payload.installation.id,
+ db_session=db_session,
+ )
+ async with db_session.begin():
+ await github_repo_service.sync_from_push(payload)
+ except Exception as e:
+ if "slack" in ctx:
+ await ctx["slack"].post(
+ SlackMessage(
+ message="Times Square worker exception.",
+ fields=[
+ SlackTextField(heading="Task", text="repo_push"),
+ SlackTextField(
+ heading="Repository",
+ text=(
+ "https://github.com/"
+ f"{payload.repository.owner.login}/"
+ f"{payload.repository.name}"
+ ),
+ ),
+ ],
+ blocks=[
+ SlackCodeBlock(
+ heading="Exception",
+ code=str(e),
+ )
+ ],
+ )
+ )
+ raise
return "FIXME"
diff --git a/src/timessquare/worker/functions/repo_removed.py b/src/timessquare/worker/functions/repo_removed.py
index 839d02b4..8a359249 100644
--- a/src/timessquare/worker/functions/repo_removed.py
+++ b/src/timessquare/worker/functions/repo_removed.py
@@ -10,6 +10,7 @@
GitHubAppInstallationEventRepoModel,
GitHubAppInstallationRepositoriesEventModel,
)
+from safir.slack.blockkit import SlackCodeBlock, SlackMessage, SlackTextField
from timessquare.worker.servicefactory import create_page_service
@@ -34,14 +35,39 @@ async def repo_removed(
)
logger.info("Running repo_removed")
- async for db_session in db_session_dependency():
- page_service = await create_page_service(
- http_client=ctx["http_client"],
- logger=logger,
- db_session=db_session,
- )
- async with db_session.begin():
- await page_service.soft_delete_pages_for_repo(
- owner=repo.owner_name, name=repo.name
+ try:
+ async for db_session in db_session_dependency():
+ page_service = await create_page_service(
+ http_client=ctx["http_client"],
+ logger=logger,
+ db_session=db_session,
)
+ async with db_session.begin():
+ await page_service.soft_delete_pages_for_repo(
+ owner=repo.owner_name, name=repo.name
+ )
+ except Exception as e:
+ if "slack" in ctx:
+ await ctx["slack"].post(
+ SlackMessage(
+ message="Times Square worker exception.",
+ fields=[
+ SlackTextField(heading="Task", text="repo_removed"),
+ SlackTextField(
+ heading="Repository",
+ text=(
+ f"https://github.com/{repo.owner_name}/"
+ f"{repo.name}"
+ ),
+ ),
+ ],
+ blocks=[
+ SlackCodeBlock(
+ heading="Exception",
+ code=str(e),
+ )
+ ],
+ )
+ )
+ raise
return "FIXME"
diff --git a/src/timessquare/worker/main.py b/src/timessquare/worker/main.py
index 76075349..0f6ea7fe 100644
--- a/src/timessquare/worker/main.py
+++ b/src/timessquare/worker/main.py
@@ -10,7 +10,10 @@
import structlog
from safir.dependencies.db_session import db_session_dependency
from safir.logging import configure_logging
+from safir.slack.blockkit import SlackMessage, SlackTextField
+from safir.slack.webhook import SlackWebhookClient
+from timessquare import __version__
from timessquare.config import config
from timessquare.dependencies.redis import redis_dependency
@@ -43,8 +46,15 @@ async def startup(ctx: dict[Any, Any]) -> None:
http_client = httpx.AsyncClient()
ctx["http_client"] = http_client
+ if config.slack_webhook_url:
+ slack_client = SlackWebhookClient(
+ str(config.slack_webhook_url),
+ "Times Square worker",
+ logger=logger,
+ )
+ ctx["slack"] = slack_client
+
ctx["logger"] = logger
- logger.info("Start up complete")
# Set up FastAPI dependencies; we can use them "manually" with
# arq to provide resources similarly to FastAPI endpoints
@@ -53,6 +63,21 @@ async def startup(ctx: dict[Any, Any]) -> None:
)
await redis_dependency.initialize(str(config.redis_url))
+ logger.info("Start up complete")
+
+ if "slack" in ctx:
+ await ctx["slack"].post(
+ SlackMessage(
+ message="Times Square worker started up.",
+ fields=[
+ SlackTextField(
+ heading="Version",
+ text=__version__,
+ ),
+ ],
+ )
+ )
+
async def shutdown(ctx: dict[Any, Any]) -> None:
"""Shut-down resources."""
@@ -72,6 +97,19 @@ async def shutdown(ctx: dict[Any, Any]) -> None:
logger.info("Worker shutdown complete.")
+ if "slack" in ctx:
+ await ctx["slack"].post(
+ SlackMessage(
+ message="Times Square worker shut down.",
+ fields=[
+ SlackTextField(
+ heading="Version",
+ text=__version__,
+ ),
+ ],
+ )
+ )
+
class WorkerSettings:
"""Configuration for a Times Square arq worker.
diff --git a/tests/handlers/v1/pages_test.py b/tests/handlers/v1/pages_test.py
index d1183de0..d50c0b0f 100644
--- a/tests/handlers/v1/pages_test.py
+++ b/tests/handlers/v1/pages_test.py
@@ -104,7 +104,6 @@ async def test_pages(client: AsyncClient, respx_mock: respx.Router) -> None:
assert r.status_code == 422
error_data = r.json()
assert error_data["detail"][0]["type"] == "parameter_default_invalid"
- assert error_data["detail"][0]["name"] == "A"
assert error_data["detail"][0]["msg"] == (
"Parameter A's default is invalid: -1."
)