+        `
+      sponsors.forEach(function (sponsor) {
+        html += `
+          <a href="${sponsor.url}" target="_blank" title="${sponsor.name}">
+            <img alt="${sponsor.name}" src="${sponsor.image}">
+          </a>
+        `
+      });
+      html += '</p>'
+      sponsorsDiv.innerHTML = html;
+    }
+  });
+}
+
+function updateInsidersPage(author_username) {
+  const sponsorURL = `https://github.com/sponsors/${author_username}`
+  const dataURL = `https://raw.githubusercontent.com/${author_username}/sponsors/main`;
+  getJSON(dataURL + '/numbers.json', function (err, numbers) {
+    document.getElementById('sponsors-count').innerHTML = numbers.count;
+    Array.from(document.getElementsByClassName('sponsors-total')).forEach(function (element) {
+      element.innerHTML = '$ ' + humanReadableAmount(numbers.total);
+    });
+    getJSON(dataURL + '/sponsors.json', function (err, sponsors) {
+      const sponsorsElem = document.getElementById('sponsors');
+      const privateSponsors = numbers.count - sponsors.length;
+      sponsors.forEach(function (sponsor) {
+        sponsorsElem.innerHTML += `
+          <a href="${sponsor.url}" target="_blank" title="${sponsor.name}">
+            <img alt="${sponsor.name}" src="${sponsor.image}">
+          </a>
+        `;
+      });
+      if (privateSponsors > 0) {
+        sponsorsElem.innerHTML += `
+          <a href="${sponsorURL}" target="_blank">
+            +${privateSponsors}
+          </a>
+        `;
+      }
+    });
+  });
+  updatePremiumSponsors(dataURL, "gold");
+  updatePremiumSponsors(dataURL, "silver");
+  updatePremiumSponsors(dataURL, "bronze");
+}
diff --git a/docs/license.md b/docs/license.md
new file mode 100644
index 0000000..e81c0ed
--- /dev/null
+++ b/docs/license.md
@@ -0,0 +1,10 @@
+---
+hide:
+- feedback
+---
+
+# License
+
+```
+--8<-- "LICENSE"
+```
diff --git a/duties.py b/duties.py
new file mode 100644
index 0000000..367fc17
--- /dev/null
+++ b/duties.py
@@ -0,0 +1,235 @@
+"""Development tasks."""
+
+from __future__ import annotations
+
+import os
+import sys
+from contextlib import contextmanager
+from importlib.metadata import version as pkgversion
+from pathlib import Path
+from typing import TYPE_CHECKING
+
+from duty import duty, tools
+
+if TYPE_CHECKING:
+    from collections.abc import Iterator
+
+    from duty.context import Context
+
+
+PY_SRC_PATHS = (Path(_) for _ in ("src", "tests", "duties.py", "scripts"))
+PY_SRC_LIST = tuple(str(_) for _ in PY_SRC_PATHS)
+PY_SRC = " ".join(PY_SRC_LIST)
+CI = os.environ.get("CI", "0") in {"1", "true", "yes", ""}
+WINDOWS = os.name == "nt"
+PTY = not WINDOWS and not CI
+MULTIRUN = os.environ.get("MULTIRUN", "0") == "1"
+
+
+def pyprefix(title: str) -> str:  # noqa: D103
+    if MULTIRUN:
+        prefix = f"(python{sys.version_info.major}.{sys.version_info.minor})"
+        return f"{prefix:14}{title}"
+    return title
+
+
+@contextmanager
+def material_insiders() -> Iterator[bool]:  # noqa: D103
+    if "+insiders" in pkgversion("mkdocs-material"):
+        os.environ["MATERIAL_INSIDERS"] = "true"
+        try:
+            yield True
+        finally:
+            os.environ.pop("MATERIAL_INSIDERS")
+    else:
+        yield False
+
+
+@duty
+def changelog(ctx: Context, bump: str = "") -> None:
+    """Update the changelog in-place with latest commits.
+
+    Parameters:
+        bump: Bump option passed to git-changelog.
+    """
+    ctx.run(tools.git_changelog(bump=bump or None), title="Updating changelog")
+
+
+@duty(pre=["check-quality", "check-types", "check-docs", "check-api"])
+def check(ctx: Context) -> None:
+    """Check it all!"""
+
+
+@duty
+def check_quality(ctx: Context) -> None:
+    """Check the code quality."""
+    ctx.run(
+        tools.ruff.check(*PY_SRC_LIST, config="config/ruff.toml"),
+        title=pyprefix("Checking code quality"),
+    )
+
+
+@duty
+def check_docs(ctx: Context) -> None:
+    """Check if the documentation builds correctly."""
+    Path("htmlcov").mkdir(parents=True, exist_ok=True)
+    Path("htmlcov/index.html").touch(exist_ok=True)
+    with material_insiders():
+        ctx.run(
+            tools.mkdocs.build(strict=True, verbose=True),
+            title=pyprefix("Building documentation"),
+        )
+
+
+@duty
+def check_types(ctx: Context) -> None:
+    """Check that the code is correctly typed."""
+    ctx.run(
+        tools.mypy(*PY_SRC_LIST, config_file="config/mypy.ini"),
+        title=pyprefix("Type-checking"),
+    )
+
+
+@duty
+def check_api(ctx: Context, *cli_args: str) -> None:
+    """Check for API breaking changes."""
+    ctx.run(
+        tools.griffe.check("griffe_pydantic", search=["src"], color=True).add_args(*cli_args),
+        title="Checking for API breaking changes",
+        nofail=True,
+    )
+
+
+@duty
+def docs(ctx: Context, *cli_args: str, host: str = "127.0.0.1", port: int = 8000) -> None:
+    """Serve the documentation (localhost:8000).
+
+    Parameters:
+        host: The host to serve the docs from.
+        port: The port to serve the docs on.
+    """
+    with material_insiders():
+        ctx.run(
+            tools.mkdocs.serve(dev_addr=f"{host}:{port}").add_args(*cli_args),
+            title="Serving documentation",
+            capture=False,
+        )
+
+
+@duty
+def docs_deploy(ctx: Context, *, force: bool = False) -> None:
+    """Deploy the documentation to GitHub pages.
+
+    Parameters:
+        force: Whether to force deployment, even from non-Insiders version.
+    """
+    os.environ["DEPLOY"] = "true"
+    with material_insiders() as insiders:
+        if not insiders:
+            ctx.run(lambda: False, title="Not deploying docs without Material for MkDocs Insiders!")
+        origin = ctx.run("git config --get remote.origin.url", silent=True, allow_overrides=False)
+        if "pawamoy-insiders/griffe-pydantic" in origin:
+            ctx.run(
+                "git remote add upstream git@github.com:mkdocstrings/griffe-pydantic",
+                silent=True,
+                nofail=True,
+                allow_overrides=False,
+            )
+            ctx.run(
+                tools.mkdocs.gh_deploy(remote_name="upstream", force=True),
+                title="Deploying documentation",
+            )
+        elif force:
+            ctx.run(
+                tools.mkdocs.gh_deploy(force=True),
+                title="Deploying documentation",
+            )
+        else:
+            ctx.run(
+                lambda: False,
+                title="Not deploying docs from public repository (do that from insiders instead!)",
+                nofail=True,
+            )
+
+
+@duty
+def format(ctx: Context) -> None:
+    """Run formatting tools on the code."""
+    ctx.run(
+        tools.ruff.check(*PY_SRC_LIST, config="config/ruff.toml", fix_only=True, exit_zero=True),
+        title="Auto-fixing code",
+    )
+    ctx.run(tools.ruff.format(*PY_SRC_LIST, config="config/ruff.toml"), title="Formatting code")
+
+
+@duty
+def build(ctx: Context) -> None:
+    """Build source and wheel distributions."""
+    ctx.run(
+        tools.build(),
+        title="Building source and wheel distributions",
+        pty=PTY,
+    )
+
+
+@duty
+def publish(ctx: Context) -> None:
+    """Publish source and wheel distributions to PyPI."""
+    if not Path("dist").exists():
+        ctx.run("false", title="No distribution files found")
+    dists = [str(dist) for dist in Path("dist").iterdir()]
+    ctx.run(
+        tools.twine.upload(*dists, skip_existing=True),
+        title="Publishing source and wheel distributions to PyPI",
+        pty=PTY,
+    )
+
+
+@duty(post=["build", "publish", "docs-deploy"])
+def release(ctx: Context, version: str = "") -> None:
+    """Release a new Python package.
+
+    Parameters:
+        version: The new version number to use.
+    """
+    origin = ctx.run("git config --get remote.origin.url", silent=True)
+    if "pawamoy-insiders/griffe-pydantic" in origin:
+        ctx.run(
+            lambda: False,
+            title="Not releasing from insiders repository (do that from public repo instead!)",
+        )
+    if not (version := (version or input("> Version to release: ")).strip()):
+        ctx.run("false", title="A version must be provided")
+    ctx.run("git add pyproject.toml CHANGELOG.md", title="Staging files", pty=PTY)
+    ctx.run(["git", "commit", "-m", f"chore: Prepare release {version}"], title="Committing changes", pty=PTY)
+    ctx.run(f"git tag {version}", title="Tagging commit", pty=PTY)
+    ctx.run("git push", title="Pushing commits", pty=False)
+    ctx.run("git push --tags", title="Pushing tags", pty=False)
+
+
+@duty(silent=True, aliases=["cov"])
+def coverage(ctx: Context) -> None:
+    """Report coverage as text and HTML."""
+    ctx.run(tools.coverage.combine(), nofail=True)
+    ctx.run(tools.coverage.report(rcfile="config/coverage.ini"), capture=False)
+    ctx.run(tools.coverage.html(rcfile="config/coverage.ini"))
+
+
+@duty
+def test(ctx: Context, *cli_args: str, match: str = "") -> None:
+    """Run the test suite.
+
+    Parameters:
+        match: A pytest expression to filter selected tests.
+    """
+    py_version = f"{sys.version_info.major}{sys.version_info.minor}"
+    os.environ["COVERAGE_FILE"] = f".coverage.{py_version}"
+    ctx.run(
+        tools.pytest(
+            "tests",
+            config_file="config/pytest.ini",
+            select=match,
+            color="yes",
+        ).add_args("-n", "auto", *cli_args),
+        title=pyprefix("Running tests"),
+    )
diff --git a/mkdocs.yml b/mkdocs.yml
new file mode 100644
index 0000000..ca7069d
--- /dev/null
+++ b/mkdocs.yml
@@ -0,0 +1,176 @@
+site_name: "griffe-pydantic"
+site_description: "Griffe extension for Pydantic."
+site_url: "https://mkdocstrings.github.io/griffe-pydantic"
+repo_url: "https://github.com/mkdocstrings/griffe-pydantic"
+repo_name: "mkdocstrings/griffe-pydantic"
+site_dir: "site"
+watch: [mkdocs.yml, README.md, CONTRIBUTING.md, CHANGELOG.md, src/griffe_pydantic]
+copyright: Copyright © 2023 Timothée Mazzucotelli
+edit_uri: edit/main/docs/
+
+validation:
+  omitted_files: warn
+  absolute_links: warn
+  unrecognized_links: warn
+
+nav:
+- Home:
+  - Overview: index.md
+  - Changelog: changelog.md
+  - Credits: credits.md
+  - License: license.md
+# defer to gen-files + literate-nav
+- API reference:
+  - griffe-pydantic: reference/
+- Development:
+  - Contributing: contributing.md
+  - Code of Conduct: code_of_conduct.md
+  # - Coverage report: coverage.md
+- Insiders:
+  - insiders/index.md
+  - Getting started:
+    - Installation: insiders/installation.md
+    - Changelog: insiders/changelog.md
+- Author's website: https://pawamoy.github.io/
+
+theme:
+  name: material
+  custom_dir: docs/.overrides
+  icon:
+    logo: material/currency-sign
+  features:
+  - announce.dismiss
+  - content.action.edit
+  - content.action.view
+  - content.code.annotate
+  - content.code.copy
+  - content.tooltips
+  - navigation.footer
+  - navigation.indexes
+  - navigation.sections
+  - navigation.tabs
+  - navigation.tabs.sticky
+  - navigation.top
+  - search.highlight
+  - search.suggest
+  - toc.follow
+  palette:
+  - media: "(prefers-color-scheme)"
+    toggle:
+      icon: material/brightness-auto
+      name: Switch to light mode
+  - media: "(prefers-color-scheme: light)"
+    scheme: default
+    primary: teal
+    accent: purple
+    toggle:
+      icon: material/weather-sunny
+      name: Switch to dark mode
+  - media: "(prefers-color-scheme: dark)"
+    scheme: slate
+    primary: black
+    accent: lime
+    toggle:
+      icon: material/weather-night
+      name: Switch to system preference
+
+extra_css:
+- css/material.css
+- css/mkdocstrings.css
+- css/insiders.css
+
+extra_javascript:
+- js/feedback.js
+
+markdown_extensions:
+- attr_list
+- admonition
+- callouts
+- footnotes
+- pymdownx.blocks.tab:
+    alternate_style: true
+    slugify: !!python/object/apply:pymdownx.slugs.slugify
+      kwds:
+        case: lower
+- pymdownx.emoji:
+    emoji_index: !!python/name:material.extensions.emoji.twemoji
+    emoji_generator: !!python/name:material.extensions.emoji.to_svg
+- pymdownx.magiclink
+- pymdownx.snippets:
+    base_path: [!relative $config_dir]
+    check_paths: true
+- pymdownx.superfences
+- pymdownx.tasklist:
+    custom_checkbox: true
+- toc:
+    permalink: "¤"
+
+plugins:
+- search
+- markdown-exec
+- gen-files:
+    scripts:
+    - scripts/gen_ref_nav.py
+- literate-nav:
+    nav_file: SUMMARY.md
+# - coverage
+- mkdocstrings:
+    handlers:
+      python:
+        paths: [src, docs/examples]
+        import:
+        - https://docs.python.org/3/objects.inv
+        - https://mkdocstrings.github.io/griffe/objects.inv
+        - https://docs.pydantic.dev/latest/objects.inv
+        options:
+          docstring_options:
+            ignore_init_summary: true
+          docstring_section_style: list
+          filters: ["!^_"]
+          heading_level: 1
+          inherited_members: true
+          merge_init_into_class: true
+          separate_signature: true
+          show_root_heading: true
+          show_root_full_path: false
+          show_signature_annotations: true
+          show_source: false
+          show_symbol_type_heading: true
+          show_symbol_type_toc: true
+          signature_crossrefs: true
+          summary: true
+- git-revision-date-localized:
+    enabled: !ENV [DEPLOY, false]
+    enable_creation_date: true
+    type: timeago
+- minify:
+    minify_html: !ENV [DEPLOY, false]
+- group:
+    enabled: !ENV [MATERIAL_INSIDERS, false]
+    plugins:
+    - typeset
+
+extra:
+  social:
+  - icon: fontawesome/brands/github
+    link: https://github.com/pawamoy
+  - icon: fontawesome/brands/mastodon
+    link: https://fosstodon.org/@pawamoy
+  - icon: fontawesome/brands/twitter
+    link: https://twitter.com/pawamoy
+  - icon: fontawesome/brands/gitter
+    link: https://gitter.im/griffe-pydantic/community
+  - icon: fontawesome/brands/python
+    link: https://pypi.org/project/griffe-pydantic/
+  analytics:
+    feedback:
+      title: Was this page helpful?
+      ratings:
+      - icon: material/emoticon-happy-outline
+        name: This page was helpful
+        data: 1
+        note: Thanks for your feedback!
+      - icon: material/emoticon-sad-outline
+        name: This page could be improved
+        data: 0
+        note: Let us know how we can improve this page.
diff --git a/pyproject.toml b/pyproject.toml
new file mode 100644
index 0000000..7ccf898
--- /dev/null
+++ b/pyproject.toml
@@ -0,0 +1,108 @@
+[build-system]
+requires = ["pdm-backend"]
+build-backend = "pdm.backend"
+
+[project]
+name = "griffe-pydantic"
+description = "Griffe extension for Pydantic."
+authors = [{name = "Timothée Mazzucotelli", email = "dev@pawamoy.fr"}]
+license = {text = "ISC"}
+readme = "README.md"
+requires-python = ">=3.9"
+keywords = []
+dynamic = ["version"]
+classifiers = [
+    "Development Status :: 4 - Beta",
+    "Intended Audience :: Developers",
+    "Programming Language :: Python",
+    "Programming Language :: Python :: 3",
+    "Programming Language :: Python :: 3 :: Only",
+    "Programming Language :: Python :: 3.9",
+    "Programming Language :: Python :: 3.10",
+    "Programming Language :: Python :: 3.11",
+    "Programming Language :: Python :: 3.12",
+    "Programming Language :: Python :: 3.13",
+    "Programming Language :: Python :: 3.14",
+    "Topic :: Documentation",
+    "Topic :: Software Development",
+    "Topic :: Utilities",
+    "Typing :: Typed",
+]
+dependencies = [
+    "griffe>=0.49",
+]
+
+[project.urls]
+Homepage = "https://mkdocstrings.github.io/griffe-pydantic"
+Documentation = "https://mkdocstrings.github.io/griffe-pydantic"
+Changelog = "https://mkdocstrings.github.io/griffe-pydantic/changelog"
+Repository = "https://github.com/mkdocstrings/griffe-pydantic"
+Issues = "https://github.com/mkdocstrings/griffe-pydantic/issues"
+Discussions = "https://github.com/mkdocstrings/griffe-pydantic/discussions"
+Gitter = "https://gitter.im/mkdocstrings/griffe-pydantic"
+Funding = "https://github.com/sponsors/pawamoy"
+
+[project.entry-points."mkdocstrings.python.templates"]
+griffe-pydantic = "griffe_pydantic:get_templates_path"
+
+[tool.pdm]
+version = {source = "scm"}
+
+[tool.pdm.build]
+package-dir = "src"
+editable-backend = "editables"
+excludes = ["**/.pytest_cache"]
+source-includes = [
+    "config",
+    "docs",
+    "scripts",
+    "share",
+    "tests",
+    "duties.py",
+    "mkdocs.yml",
+    "*.md",
+    "LICENSE",
+]
+
+[tool.pdm.build.wheel-data]
+data = [
+    {path = "share/**/*", relative-to = "."},
+]
+
+[tool.uv]
+dev-dependencies = [
+    # dev
+    "editables>=0.5",
+
+    # maintenance
+    "build>=1.2",
+    "git-changelog>=2.5",
+    "twine>=5.1",
+
+    # ci
+    "duty>=1.4",
+    "ruff>=0.4",
+    "pytest>=8.2",
+    "pytest-cov>=5.0",
+    "pytest-randomly>=3.15",
+    "pytest-xdist>=3.6",
+    "mypy>=1.10",
+    "pydantic>=2.6",
+    "types-markdown>=3.6",
+    "types-pyyaml>=6.0",
+
+    # docs
+    "black>=24.4",
+    "markdown-callouts>=0.4",
+    "markdown-exec>=1.8",
+    "mkdocs>=1.6",
+    "mkdocs-coverage>=1.0",
+    "mkdocs-gen-files>=0.5",
+    "mkdocs-git-revision-date-localized-plugin>=1.2",
+    "mkdocs-literate-nav>=0.6",
+    "mkdocs-material>=9.5",
+    "mkdocs-minify-plugin>=0.8",
+    "mkdocstrings[python]>=0.25",
+    # YORE: EOL 3.10: Remove line.
+    "tomli>=2.0; python_version < '3.11'",
+]
\ No newline at end of file
diff --git a/scripts/gen_credits.py b/scripts/gen_credits.py
new file mode 100644
index 0000000..ab60181
--- /dev/null
+++ b/scripts/gen_credits.py
@@ -0,0 +1,179 @@
+"""Script to generate the project's credits."""
+
+from __future__ import annotations
+
+import os
+import sys
+from collections import defaultdict
+from collections.abc import Iterable
+from importlib.metadata import distributions
+from itertools import chain
+from pathlib import Path
+from textwrap import dedent
+from typing import Union
+
+from jinja2 import StrictUndefined
+from jinja2.sandbox import SandboxedEnvironment
+from packaging.requirements import Requirement
+
+# YORE: EOL 3.10: Replace block with line 2.
+if sys.version_info >= (3, 11):
+    import tomllib
+else:
+    import tomli as tomllib
+
+project_dir = Path(os.getenv("MKDOCS_CONFIG_DIR", "."))
+with project_dir.joinpath("pyproject.toml").open("rb") as pyproject_file:
+    pyproject = tomllib.load(pyproject_file)
+project = pyproject["project"]
+project_name = project["name"]
+devdeps = [dep for dep in pyproject["tool"]["uv"]["dev-dependencies"] if not dep.startswith("-e")]
+
+PackageMetadata = dict[str, Union[str, Iterable[str]]]
+Metadata = dict[str, PackageMetadata]
+
+
+def _merge_fields(metadata: dict) -> PackageMetadata:
+    fields = defaultdict(list)
+    for header, value in metadata.items():
+        fields[header.lower()].append(value.strip())
+    return {
+        field: value if len(value) > 1 or field in ("classifier", "requires-dist") else value[0]
+        for field, value in fields.items()
+    }
+
+
+def _norm_name(name: str) -> str:
+    return name.replace("_", "-").replace(".", "-").lower()
+
+
+def _requirements(deps: list[str]) -> dict[str, Requirement]:
+    return {_norm_name((req := Requirement(dep)).name): req for dep in deps}
+
+
+def _extra_marker(req: Requirement) -> str | None:
+    if not req.marker:
+        return None
+    try:
+        return next(marker[2].value for marker in req.marker._markers if getattr(marker[0], "value", None) == "extra")
+    except StopIteration:
+        return None
+
+
+def _get_metadata() -> Metadata:
+    metadata = {}
+    for pkg in distributions():
+        name = _norm_name(pkg.name)  # type: ignore[attr-defined,unused-ignore]
+        metadata[name] = _merge_fields(pkg.metadata)  # type: ignore[arg-type]
+        metadata[name]["spec"] = set()
+        metadata[name]["extras"] = set()
+        metadata[name].setdefault("summary", "")
+        _set_license(metadata[name])
+    return metadata
+
+
+def _set_license(metadata: PackageMetadata) -> None:
+    license_field = metadata.get("license-expression", metadata.get("license", ""))
+    license_name = license_field if isinstance(license_field, str) else " + ".join(license_field)
+    check_classifiers = license_name in ("UNKNOWN", "Dual License", "") or license_name.count("\n")
+    if check_classifiers:
+        license_names = []
+        for classifier in metadata["classifier"]:
+            if classifier.startswith("License ::"):
+                license_names.append(classifier.rsplit("::", 1)[1].strip())
+        license_name = " + ".join(license_names)
+    metadata["license"] = license_name or "?"
+
+
+def _get_deps(base_deps: dict[str, Requirement], metadata: Metadata) -> Metadata:
+    deps = {}
+    for dep_name, dep_req in base_deps.items():
+        if dep_name not in metadata or dep_name == "griffe-pydantic":
+            continue
+        metadata[dep_name]["spec"] |= {str(spec) for spec in dep_req.specifier}  # type: ignore[operator]
+        metadata[dep_name]["extras"] |= dep_req.extras  # type: ignore[operator]
+        deps[dep_name] = metadata[dep_name]
+
+    again = True
+    while again:
+        again = False
+        for pkg_name in metadata:
+            if pkg_name in deps:
+                for pkg_dependency in metadata[pkg_name].get("requires-dist", []):
+                    requirement = Requirement(pkg_dependency)
+                    dep_name = _norm_name(requirement.name)
+                    extra_marker = _extra_marker(requirement)
+                    if (
+                        dep_name in metadata
+                        and dep_name not in deps
+                        and dep_name != project["name"]
+                        and (not extra_marker or extra_marker in deps[pkg_name]["extras"])
+                    ):
+                        metadata[dep_name]["spec"] |= {str(spec) for spec in requirement.specifier}  # type: ignore[operator]
+                        deps[dep_name] = metadata[dep_name]
+                        again = True
+
+    return deps
+
+
+def _render_credits() -> str:
+    metadata = _get_metadata()
+    dev_dependencies = _get_deps(_requirements(devdeps), metadata)
+    prod_dependencies = _get_deps(
+        _requirements(
+            chain(  # type: ignore[arg-type]
+                project.get("dependencies", []),
+                chain(*project.get("optional-dependencies", {}).values()),
+            ),
+        ),
+        metadata,
+    )
+
+    template_data = {
+        "project_name": project_name,
+        "prod_dependencies": sorted(prod_dependencies.values(), key=lambda dep: str(dep["name"]).lower()),
+        "dev_dependencies": sorted(dev_dependencies.values(), key=lambda dep: str(dep["name"]).lower()),
+        "more_credits": "http://pawamoy.github.io/credits/",
+    }
+    template_text = dedent(
+        """
+        # Credits
+
+        These projects were used to build *{{ project_name }}*. **Thank you!**
+
+        [Python](https://www.python.org/) |
+        [uv](https://github.com/astral-sh/uv) |
+        [copier-uv](https://github.com/pawamoy/copier-uv)
+
+        {% macro dep_line(dep) -%}
+        [{{ dep.name }}](https://pypi.org/project/{{ dep.name }}/) | {{ dep.summary }} | {{ ("`" ~ dep.spec|sort(reverse=True)|join(", ") ~ "`") if dep.spec else "" }} | `{{ dep.version }}` | {{ dep.license }}
+        {%- endmacro %}
+
+        {% if prod_dependencies -%}
+        ### Runtime dependencies
+
+        Project | Summary | Version (accepted) | Version (last resolved) | License
+        ------- | ------- | ------------------ | ----------------------- | -------
+        {% for dep in prod_dependencies -%}
+        {{ dep_line(dep) }}
+        {% endfor %}
+
+        {% endif -%}
+        {% if dev_dependencies -%}
+        ### Development dependencies
+
+        Project | Summary | Version (accepted) | Version (last resolved) | License
+        ------- | ------- | ------------------ | ----------------------- | -------
+        {% for dep in dev_dependencies -%}
+        {{ dep_line(dep) }}
+        {% endfor %}
+
+        {% endif -%}
+        {% if more_credits %}**[More credits from the author]({{ more_credits }})**{% endif %}
+        """,
+    )
+    jinja_env = SandboxedEnvironment(undefined=StrictUndefined)
+    return jinja_env.from_string(template_text).render(**template_data)
+
+
+print(_render_credits())
diff --git a/scripts/gen_ref_nav.py b/scripts/gen_ref_nav.py
new file mode 100644
index 0000000..6939e86
--- /dev/null
+++ b/scripts/gen_ref_nav.py
@@ -0,0 +1,37 @@
+"""Generate the code reference pages and navigation."""
+
+from pathlib import Path
+
+import mkdocs_gen_files
+
+nav = mkdocs_gen_files.Nav()
+mod_symbol = '<code class="doc-symbol doc-symbol-nav doc-symbol-module"></code>'
+
+root = Path(__file__).parent.parent
+src = root / "src"
+
+for path in sorted(src.rglob("*.py")):
+ module_path = path.relative_to(src).with_suffix("")
+ doc_path = path.relative_to(src).with_suffix(".md")
+ full_doc_path = Path("reference", doc_path)
+
+ parts = tuple(module_path.parts)
+
+ if parts[-1] == "__init__":
+ parts = parts[:-1]
+ doc_path = doc_path.with_name("index.md")
+ full_doc_path = full_doc_path.with_name("index.md")
+ elif parts[-1].startswith("_"):
+ continue
+
+ nav_parts = [f"{mod_symbol} {part}" for part in parts]
+ nav[tuple(nav_parts)] = doc_path.as_posix()
+
+ with mkdocs_gen_files.open(full_doc_path, "w") as fd:
+ ident = ".".join(parts)
+ fd.write(f"---\ntitle: {ident}\n---\n\n::: {ident}")
+
+ mkdocs_gen_files.set_edit_path(full_doc_path, ".." / path.relative_to(root))
+
+with mkdocs_gen_files.open("reference/SUMMARY.md", "w") as nav_file:
+ nav_file.writelines(nav.build_literate_nav())
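To make the path arithmetic above concrete, here is a minimal trace for one hypothetical source file (names assumed for illustration):

```python
from pathlib import Path

path = Path("src/griffe_pydantic/extension.py")
module_path = path.relative_to("src").with_suffix("")  # griffe_pydantic/extension
doc_path = path.relative_to("src").with_suffix(".md")  # griffe_pydantic/extension.md
full_doc_path = Path("reference", doc_path)            # reference/griffe_pydantic/extension.md
ident = ".".join(module_path.parts)                    # "griffe_pydantic.extension"
# Generated page body, handed to mkdocstrings for rendering:
print(f"---\ntitle: {ident}\n---\n\n::: {ident}")
```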
diff --git a/scripts/insiders.py b/scripts/insiders.py
new file mode 100644
index 0000000..849c631
--- /dev/null
+++ b/scripts/insiders.py
@@ -0,0 +1,206 @@
+"""Functions related to Insiders funding goals."""
+
+from __future__ import annotations
+
+import json
+import logging
+import os
+import posixpath
+from dataclasses import dataclass
+from datetime import date, datetime, timedelta
+from itertools import chain
+from pathlib import Path
+from typing import TYPE_CHECKING, cast
+from urllib.error import HTTPError
+from urllib.parse import urljoin
+from urllib.request import urlopen
+
+import yaml
+
+if TYPE_CHECKING:
+ from collections.abc import Iterable
+
+logger = logging.getLogger(f"mkdocs.logs.{__name__}")
+
+
+def human_readable_amount(amount: int) -> str: # noqa: D103
+ str_amount = str(amount)
+ if len(str_amount) >= 4: # noqa: PLR2004
+ return f"{str_amount[:len(str_amount)-3]},{str_amount[-3:]}"
+ return str_amount
+
+
+@dataclass
+class Project:
+ """Class representing an Insiders project."""
+
+ name: str
+ url: str
+
+
+@dataclass
+class Feature:
+ """Class representing an Insiders feature."""
+
+ name: str
+ ref: str | None
+ since: date | None
+ project: Project | None
+
+ def url(self, rel_base: str = "..") -> str | None: # noqa: D102
+ if not self.ref:
+ return None
+ if self.project:
+ rel_base = self.project.url
+ return posixpath.join(rel_base, self.ref.lstrip("/"))
+
+ def render(self, rel_base: str = "..", *, badge: bool = False) -> None: # noqa: D102
+ new = ""
+ if badge:
+ recent = self.since and date.today() - self.since <= timedelta(days=60) # noqa: DTZ011
+ if recent:
+ ft_date = self.since.strftime("%B %d, %Y") # type: ignore[union-attr]
+ new = f' :material-alert-decagram:{{ .new-feature .vibrate title="Added on {ft_date}" }}'
+ project = f"[{self.project.name}]({self.project.url}) — " if self.project else ""
+ feature = f"[{self.name}]({self.url(rel_base)})" if self.ref else self.name
+ print(f"- [{'x' if self.since else ' '}] {project}{feature}{new}")
+
+
+@dataclass
+class Goal:
+ """Class representing an Insiders goal."""
+
+ name: str
+ amount: int
+ features: list[Feature]
+ complete: bool = False
+
+ @property
+ def human_readable_amount(self) -> str: # noqa: D102
+ return human_readable_amount(self.amount)
+
+ def render(self, rel_base: str = "..") -> None: # noqa: D102
+ print(f"#### $ {self.human_readable_amount} — {self.name}\n")
+ if self.features:
+ for feature in self.features:
+ feature.render(rel_base)
+ print("")
+ else:
+ print("There are no features in this goal for this project. ")
+ print(
+ "[See the features in this goal **for all Insiders projects.**]"
+ f"(https://pawamoy.github.io/insiders/#{self.amount}-{self.name.lower().replace(' ', '-')})",
+ )
+
+
+def load_goals(data: str, funding: int = 0, project: Project | None = None) -> dict[int, Goal]:
+ """Load goals from JSON data.
+
+ Parameters:
+ data: The JSON data.
+ funding: The current total funding, per month.
+ project: The project the goals belong to.
+
+ Returns:
+ A dictionary of goals, keyed by their target monthly amount.
+ """
+ goals_data = yaml.safe_load(data)["goals"]
+ return {
+ amount: Goal(
+ name=goal_data["name"],
+ amount=amount,
+ complete=funding >= amount,
+ features=[
+ Feature(
+ name=feature_data["name"],
+ ref=feature_data.get("ref"),
+ since=feature_data.get("since") and datetime.strptime(feature_data["since"], "%Y/%m/%d").date(), # noqa: DTZ007
+ project=project,
+ )
+ for feature_data in goal_data["features"]
+ ],
+ )
+ for amount, goal_data in goals_data.items()
+ }
+
+
+def _load_goals_from_disk(path: str, funding: int = 0) -> dict[int, Goal]:
+ project_dir = os.getenv("MKDOCS_CONFIG_DIR", ".")
+ try:
+ data = Path(project_dir, path).read_text()
+ except OSError as error:
+ raise RuntimeError(f"Could not load data from disk: {path}") from error
+ return load_goals(data, funding)
+
+
+def _load_goals_from_url(source_data: tuple[str, str, str], funding: int = 0) -> dict[int, Goal]:
+ project_name, project_url, data_fragment = source_data
+ data_url = urljoin(project_url, data_fragment)
+ try:
+ with urlopen(data_url) as response: # noqa: S310
+ data = response.read()
+ except HTTPError as error:
+ raise RuntimeError(f"Could not load data from network: {data_url}") from error
+ return load_goals(data, funding, project=Project(name=project_name, url=project_url))
+
+
+def _load_goals(source: str | tuple[str, str, str], funding: int = 0) -> dict[int, Goal]:
+ if isinstance(source, str):
+ return _load_goals_from_disk(source, funding)
+ return _load_goals_from_url(source, funding)
+
+
+def funding_goals(source: str | list[str | tuple[str, str, str]], funding: int = 0) -> dict[int, Goal]:
+ """Load funding goals from a given data source.
+
+ Parameters:
+ source: The data source (local file path or URL).
+ funding: The current total funding, per month.
+
+ Returns:
+ A dictionary of goals, keyed by their target monthly amount.
+ """
+ if isinstance(source, str):
+ return _load_goals_from_disk(source, funding)
+ goals = {}
+ for src in source:
+ source_goals = _load_goals(src, funding)
+ for amount, goal in source_goals.items():
+ if amount not in goals:
+ goals[amount] = goal
+ else:
+ goals[amount].features.extend(goal.features)
+ return {amount: goals[amount] for amount in sorted(goals)}
+
+
+def feature_list(goals: Iterable[Goal]) -> list[Feature]:
+ """Extract feature list from funding goals.
+
+ Parameters:
+ goals: A list of funding goals.
+
+ Returns:
+ A list of features.
+ """
+ return list(chain.from_iterable(goal.features for goal in goals))
+
+
+def load_json(url: str) -> str | list | dict: # noqa: D103
+ with urlopen(url) as response: # noqa: S310
+ return json.loads(response.read().decode())
+
+
+data_source = globals()["data_source"]
+sponsor_url = "https://github.com/sponsors/pawamoy"
+data_url = "https://raw.githubusercontent.com/pawamoy/sponsors/main"
+numbers: dict[str, int] = load_json(f"{data_url}/numbers.json") # type: ignore[assignment]
+sponsors: list[dict] = load_json(f"{data_url}/sponsors.json") # type: ignore[assignment]
+current_funding = numbers["total"]
+sponsors_count = numbers["count"]
+goals = funding_goals(data_source, funding=current_funding)
+ongoing_goals = [goal for goal in goals.values() if not goal.complete]
+unreleased_features = sorted(
+ (ft for ft in feature_list(ongoing_goals) if ft.since),
+ key=lambda ft: cast(date, ft.since),
+ reverse=True,
+)
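A small sketch of the YAML shape `load_goals` expects, inferred from the key lookups above (`goals`, `name`, `features`, `ref`, `since`); all values are illustrative:

```python
import yaml

example = """
goals:
  1000:
    name: Example goal
    features:
    - name: Example feature
      ref: /insiders/#example
      since: 2024/01/01
"""
goals_data = yaml.safe_load(example)["goals"]
assert goals_data[1000]["features"][0]["ref"] == "/insiders/#example"
# load_goals(example, funding=1500) would mark this goal complete (1500 >= 1000).
```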
diff --git a/scripts/make b/scripts/make
new file mode 100755
index 0000000..ac43062
--- /dev/null
+++ b/scripts/make
@@ -0,0 +1,190 @@
+#!/usr/bin/env python3
+"""Management commands."""
+
+from __future__ import annotations
+
+import os
+import shutil
+import subprocess
+import sys
+from contextlib import contextmanager
+from pathlib import Path
+from textwrap import dedent
+from typing import Any, Iterator
+
+PYTHON_VERSIONS = os.getenv("PYTHON_VERSIONS", "3.9 3.10 3.11 3.12 3.13 3.14").split()
+
+
+def shell(cmd: str, capture_output: bool = False, **kwargs: Any) -> str | None:
+ """Run a shell command."""
+ if capture_output:
+ return subprocess.check_output(cmd, shell=True, text=True, **kwargs) # noqa: S602
+ subprocess.run(cmd, shell=True, check=True, stderr=subprocess.STDOUT, **kwargs) # noqa: S602
+ return None
+
+
+@contextmanager
+def environ(**kwargs: str) -> Iterator[None]:
+ """Temporarily set environment variables."""
+ original = dict(os.environ)
+ os.environ.update(kwargs)
+ try:
+ yield
+ finally:
+ os.environ.clear()
+ os.environ.update(original)
+
+
+def uv_install(venv: Path) -> None:
+ """Install dependencies using uv."""
+ with environ(UV_PROJECT_ENVIRONMENT=str(venv), PYO3_USE_ABI3_FORWARD_COMPATIBILITY="1"):
+ if "CI" in os.environ:
+ shell("uv sync --no-editable")
+ else:
+ shell("uv sync")
+
+
+def setup() -> None:
+ """Set up the project."""
+ if not shutil.which("uv"):
+ raise ValueError("make: setup: uv must be installed, see https://github.com/astral-sh/uv")
+
+ print("Installing dependencies (default environment)") # noqa: T201
+ default_venv = Path(".venv")
+ if not default_venv.exists():
+ shell("uv venv --python python")
+ uv_install(default_venv)
+
+ if PYTHON_VERSIONS:
+ for version in PYTHON_VERSIONS:
+ print(f"\nInstalling dependencies (python{version})") # noqa: T201
+ venv_path = Path(f".venvs/{version}")
+ if not venv_path.exists():
+ shell(f"uv venv --python {version} {venv_path}")
+ with environ(UV_PROJECT_ENVIRONMENT=str(venv_path.resolve())):
+ uv_install(venv_path)
+
+
+def run(version: str, cmd: str, *args: str, no_sync: bool = False, **kwargs: Any) -> None:
+ """Run a command in a virtual environment."""
+ kwargs = {"check": True, **kwargs}
+ uv_run = ["uv", "run"]
+ if no_sync:
+ uv_run.append("--no-sync")
+ if version == "default":
+ with environ(UV_PROJECT_ENVIRONMENT=".venv"):
+ subprocess.run([*uv_run, cmd, *args], **kwargs) # noqa: S603, PLW1510
+ else:
+ with environ(UV_PROJECT_ENVIRONMENT=f".venvs/{version}", MULTIRUN="1"):
+ subprocess.run([*uv_run, cmd, *args], **kwargs) # noqa: S603, PLW1510
+
+
+def multirun(cmd: str, *args: str, **kwargs: Any) -> None:
+ """Run a command for all configured Python versions."""
+ if PYTHON_VERSIONS:
+ for version in PYTHON_VERSIONS:
+ run(version, cmd, *args, **kwargs)
+ else:
+ run("default", cmd, *args, **kwargs)
+
+
+def allrun(cmd: str, *args: str, **kwargs: Any) -> None:
+ """Run a command in all virtual environments."""
+ run("default", cmd, *args, **kwargs)
+ if PYTHON_VERSIONS:
+ multirun(cmd, *args, **kwargs)
+
+
+def clean() -> None:
+ """Delete build artifacts and cache files."""
+ paths_to_clean = ["build", "dist", "htmlcov", "site", ".coverage*", ".pdm-build"]
+ for path in paths_to_clean:
+ shell(f"rm -rf {path}")
+
+ cache_dirs = {".cache", ".pytest_cache", ".mypy_cache", ".ruff_cache", "__pycache__"}
+ for dirpath in Path(".").rglob("*/"):
+ if dirpath.parts[0] not in (".venv", ".venvs") and dirpath.name in cache_dirs:
+ shutil.rmtree(dirpath, ignore_errors=True)
+
+
+def vscode() -> None:
+ """Configure VSCode to work on this project."""
+ Path(".vscode").mkdir(parents=True, exist_ok=True)
+ shell("cp -v config/vscode/* .vscode")
+
+
+def main() -> int:
+ """Main entry point."""
+ args = list(sys.argv[1:])
+ if not args or args[0] == "help":
+ if len(args) > 1:
+ run("default", "duty", "--help", args[1])
+ else:
+ print(
+ dedent(
+ """
+ Available commands
+ help Print this help. Add task name to print help.
+ setup Set up all virtual environments (install dependencies).
+ run Run a command in the default virtual environment.
+ multirun Run a command for all configured Python versions.
+ allrun Run a command in all virtual environments.
+ 3.x Run a command in the virtual environment for Python 3.x.
+ clean Delete build artifacts and cache files.
+ vscode Configure VSCode to work on this project.
+ """
+ ),
+ flush=True,
+ ) # noqa: T201
+ if os.path.exists(".venv"):
+ print("\nAvailable tasks", flush=True) # noqa: T201
+ run("default", "duty", "--list", no_sync=True)
+ return 0
+
+ while args:
+ cmd = args.pop(0)
+
+ if cmd == "run":
+ run("default", *args)
+ return 0
+
+ if cmd == "multirun":
+ multirun(*args)
+ return 0
+
+ if cmd == "allrun":
+ allrun(*args)
+ return 0
+
+ if cmd.startswith("3."):
+ run(cmd, *args)
+ return 0
+
+ opts = []
+ while args and (args[0].startswith("-") or "=" in args[0]):
+ opts.append(args.pop(0))
+
+ if cmd == "clean":
+ clean()
+ elif cmd == "setup":
+ setup()
+ elif cmd == "vscode":
+ vscode()
+ elif cmd == "check":
+ multirun("duty", "check-quality", "check-types", "check-docs")
+ run("default", "duty", "check-api")
+ elif cmd in {"check-quality", "check-docs", "check-types", "test"}:
+ multirun("duty", cmd, *opts)
+ else:
+ run("default", "duty", cmd, *opts)
+
+ return 0
+
+
+if __name__ == "__main__":
+ try:
+ sys.exit(main())
+ except subprocess.CalledProcessError as process:
+ if process.output:
+ print(process.output, file=sys.stderr) # noqa: T201
+ sys.exit(process.returncode)
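For orientation, a few representative invocations, read directly off the dispatch in `main()` above (the version number is only an example and must appear in `PYTHON_VERSIONS`):

```python
# scripts/make setup            -> create .venv, plus .venvs/3.x per configured version
# scripts/make test             -> run the "test" duty across all configured versions
# scripts/make 3.12 duty test   -> run a command in the Python 3.12 environment only
# scripts/make clean            -> delete build artifacts and cache files
```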
diff --git a/src/griffe_pydantic/__init__.py b/src/griffe_pydantic/__init__.py
new file mode 100644
index 0000000..cc05b18
--- /dev/null
+++ b/src/griffe_pydantic/__init__.py
@@ -0,0 +1,18 @@
+"""griffe-pydantic package.
+
+Griffe extension for Pydantic.
+"""
+
+from __future__ import annotations
+
+from pathlib import Path
+
+from griffe_pydantic.extension import PydanticExtension
+
+
+def get_templates_path() -> Path:
+ """Return the templates directory path."""
+ return Path(__file__).parent / "templates"
+
+
+__all__: list[str] = ["get_templates_path", "PydanticExtension"]
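For context, the `mkdocstrings.python.templates` entry point declared in `pyproject.toml` above resolves to `get_templates_path`. A rough sketch of how a consumer could discover it (the `group` keyword needs Python 3.10+, or the `importlib_metadata` backport):

```python
from importlib.metadata import entry_points

for ep in entry_points(group="mkdocstrings.python.templates"):
    # ep.load() returns get_templates_path; calling it yields the templates directory.
    print(ep.name, ep.load()())
```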
diff --git a/src/griffe_pydantic/common.py b/src/griffe_pydantic/common.py
new file mode 100644
index 0000000..612a063
--- /dev/null
+++ b/src/griffe_pydantic/common.py
@@ -0,0 +1,78 @@
+"""Griffe extension for Pydantic."""
+
+from __future__ import annotations
+
+import json
+from functools import partial
+from typing import TYPE_CHECKING
+
+if TYPE_CHECKING:
+ from collections.abc import Sequence
+
+ from griffe import Attribute, Class, Function
+ from pydantic import BaseModel
+
+self_namespace = "griffe_pydantic"
+mkdocstrings_namespace = "mkdocstrings"
+
+field_constraints = {
+ "gt",
+ "ge",
+ "lt",
+ "le",
+ "multiple_of",
+ "min_length",
+ "max_length",
+ "pattern",
+ "allow_inf_nan",
+ "max_digits",
+ "decimal_places",
+}
+
+
+def _model_fields(cls: Class) -> dict[str, Attribute]:
+ return {name: attr for name, attr in cls.members.items() if "pydantic-field" in attr.labels} # type: ignore[misc]
+
+
+def _model_validators(cls: Class) -> dict[str, Function]:
+ return {name: func for name, func in cls.members.items() if "pydantic-validator" in func.labels} # type: ignore[misc]
+
+
+def json_schema(model: type[BaseModel]) -> str:
+ """Produce a model schema as JSON.
+
+ Parameters:
+ model: A Pydantic model.
+
+ Returns:
+ A schema as JSON.
+ """
+ return json.dumps(model.model_json_schema(), indent=2)
+
+
+def process_class(cls: Class) -> None:
+ """Set metadata on a Pydantic model.
+
+ Parameters:
+ cls: The Griffe class representing the Pydantic model.
+ """
+ cls.labels.add("pydantic-model")
+ cls.extra[self_namespace]["fields"] = partial(_model_fields, cls)
+ cls.extra[self_namespace]["validators"] = partial(_model_validators, cls)
+ cls.extra[mkdocstrings_namespace]["template"] = "pydantic_model.html.jinja"
+
+
+def process_function(func: Function, cls: Class, fields: Sequence[str]) -> None:
+ """Set metadata on a Pydantic validator.
+
+ Parameters:
+ func: The Griffe function representing the Pydantic validator.
+ cls: The Griffe class representing the Pydantic model.
+ fields: The names of the fields targeted by the validator.
+ """
+ func.labels = {"pydantic-validator"}
+ targets = [cls.members[field] for field in fields]
+
+ func.extra[self_namespace].setdefault("targets", [])
+ func.extra[self_namespace]["targets"].extend(targets)
+ for target in targets:
+ target.extra[self_namespace].setdefault("validators", [])
+ target.extra[self_namespace]["validators"].append(func)
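As a rough sketch of the bookkeeping above, assuming `cls` is an already-loaded `griffe.Class` for some model:

```python
# After process_class(cls):
#   cls.labels                                    -> contains "pydantic-model"
#   cls.extra["griffe_pydantic"]["fields"]()      -> {name: Attribute} for pydantic-field members
#   cls.extra["griffe_pydantic"]["validators"]()  -> {name: Function} for pydantic-validator members
#   cls.extra["mkdocstrings"]["template"]         -> "pydantic_model.html.jinja"
```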
diff --git a/src/griffe_pydantic/debug.py b/src/griffe_pydantic/debug.py
new file mode 100644
index 0000000..c4c161c
--- /dev/null
+++ b/src/griffe_pydantic/debug.py
@@ -0,0 +1,109 @@
+"""Debugging utilities."""
+
+from __future__ import annotations
+
+import os
+import platform
+import sys
+from dataclasses import dataclass
+from importlib import metadata
+
+
+@dataclass
+class Variable:
+ """Dataclass describing an environment variable."""
+
+ name: str
+ """Variable name."""
+ value: str
+ """Variable value."""
+
+
+@dataclass
+class Package:
+ """Dataclass describing a Python package."""
+
+ name: str
+ """Package name."""
+ version: str
+ """Package version."""
+
+
+@dataclass
+class Environment:
+ """Dataclass to store environment information."""
+
+ interpreter_name: str
+ """Python interpreter name."""
+ interpreter_version: str
+ """Python interpreter version."""
+ interpreter_path: str
+ """Path to Python executable."""
+ platform: str
+ """Operating System."""
+ packages: list[Package]
+ """Installed packages."""
+ variables: list[Variable]
+ """Environment variables."""
+
+
+def _interpreter_name_version() -> tuple[str, str]:
+ if hasattr(sys, "implementation"):
+ impl = sys.implementation.version
+ version = f"{impl.major}.{impl.minor}.{impl.micro}"
+ kind = impl.releaselevel
+ if kind != "final":
+ version += kind[0] + str(impl.serial)
+ return sys.implementation.name, version
+ return "", "0.0.0"
+
+
+def get_version(dist: str = "griffe-pydantic") -> str:
+ """Get version of the given distribution.
+
+ Parameters:
+ dist: A distribution name.
+
+ Returns:
+ A version number.
+ """
+ try:
+ return metadata.version(dist)
+ except metadata.PackageNotFoundError:
+ return "0.0.0"
+
+
+def get_debug_info() -> Environment:
+ """Get debug/environment information.
+
+ Returns:
+ Environment information.
+ """
+ py_name, py_version = _interpreter_name_version()
+ packages = ["griffe-pydantic"]
+ variables = ["PYTHONPATH", *[var for var in os.environ if var.startswith("GRIFFE_PYDANTIC")]]
+ return Environment(
+ interpreter_name=py_name,
+ interpreter_version=py_version,
+ interpreter_path=sys.executable,
+ platform=platform.platform(),
+ variables=[Variable(var, val) for var in variables if (val := os.getenv(var))],
+ packages=[Package(pkg, get_version(pkg)) for pkg in packages],
+ )
+
+
+def print_debug_info() -> None:
+ """Print debug/environment information."""
+ info = get_debug_info()
+ print(f"- __System__: {info.platform}")
+ print(f"- __Python__: {info.interpreter_name} {info.interpreter_version} ({info.interpreter_path})")
+ print("- __Environment variables__:")
+ for var in info.variables:
+ print(f" - `{var.name}`: `{var.value}`")
+ print("- __Installed packages__:")
+ for pkg in info.packages:
+ print(f" - `{pkg.name}` v{pkg.version}")
+
+
+if __name__ == "__main__":
+ print_debug_info()
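Thanks to the `__main__` guard, the report can be printed from the command line; the values are environment-specific:

```python
# python -m griffe_pydantic.debug
#
# - __System__: ...
# - __Python__: cpython 3.x.y (/usr/bin/python3)
# - __Environment variables__: ...
# - __Installed packages__:
#   - `griffe-pydantic` v...
```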
diff --git a/src/griffe_pydantic/dynamic.py b/src/griffe_pydantic/dynamic.py
new file mode 100644
index 0000000..f0ca360
--- /dev/null
+++ b/src/griffe_pydantic/dynamic.py
@@ -0,0 +1,55 @@
+"""Griffe extension for Pydantic."""
+
+from __future__ import annotations
+
+from typing import TYPE_CHECKING
+
+from griffe import (
+ Attribute,
+ Class,
+ Docstring,
+ Function,
+ get_logger,
+)
+from pydantic.fields import FieldInfo
+
+from griffe_pydantic import common
+
+if TYPE_CHECKING:
+ from griffe import ObjectNode
+
+logger = get_logger(__name__)
+
+
+def process_attribute(node: ObjectNode, attr: Attribute, cls: Class) -> None:
+ """Handle Pydantic fields."""
+ if attr.name == "model_config":
+ cls.extra[common.self_namespace]["config"] = node.obj
+ return
+
+ if not isinstance(node.obj, FieldInfo):
+ return
+
+ attr.labels = {"pydantic-field"}
+ attr.value = node.obj.default
+ constraints = {}
+ for constraint in common.field_constraints:
+ if (value := getattr(node.obj, constraint, None)) is not None:
+ constraints[constraint] = value
+ attr.extra[common.self_namespace]["constraints"] = constraints
+
+ # Populate docstring from the field's `description` argument.
+ if not attr.docstring and (docstring := node.obj.description):
+ attr.docstring = Docstring(docstring, parent=attr)
+
+
+def process_function(node: ObjectNode, func: Function, cls: Class) -> None:
+ """Handle Pydantic field validators."""
+ if dec_info := getattr(node.obj, "decorator_info", None):
+ common.process_function(func, cls, dec_info.fields)
+
+
+def process_class(node: ObjectNode, cls: Class) -> None:
+ """Detect and prepare Pydantic models."""
+ common.process_class(cls)
+ cls.extra[common.self_namespace]["schema"] = common.json_schema(node.obj)
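For illustration, a hypothetical model the hooks above would process when Griffe inspects the package dynamically: the `FieldInfo` default becomes the attribute value, any constraint attributes found on it are collected per the `field_constraints` loop, and `description` feeds the docstring.

```python
from pydantic import BaseModel, Field


class Point(BaseModel):
    """A hypothetical model, for illustration only."""

    x: int = Field(0, description="Horizontal coordinate.")
```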
diff --git a/src/griffe_pydantic/extension.py b/src/griffe_pydantic/extension.py
new file mode 100644
index 0000000..8b61799
--- /dev/null
+++ b/src/griffe_pydantic/extension.py
@@ -0,0 +1,89 @@
+"""Griffe extension for Pydantic."""
+
+from __future__ import annotations
+
+import ast
+from typing import TYPE_CHECKING, Any
+
+from griffe import (
+ Attribute,
+ Class,
+ Extension,
+ Function,
+ Module,
+ get_logger,
+)
+
+from griffe_pydantic import dynamic, static
+
+if TYPE_CHECKING:
+ from griffe import ObjectNode
+
+
+logger = get_logger(__name__)
+
+
+class PydanticExtension(Extension):
+ """Griffe extension for Pydantic."""
+
+ def __init__(self, *, schema: bool = False) -> None:
+ """Initialize the extension.
+
+ Parameters:
+ schema: Whether to compute and store the JSON schema of models.
+ """
+ super().__init__()
+ self.schema = schema
+ self.in_model: list[Class] = []
+ self.processed: set[str] = set()
+
+ def on_package_loaded(self, *, pkg: Module, **kwargs: Any) -> None: # noqa: ARG002
+ """Detect models once the whole package is loaded."""
+ static.process_module(pkg, processed=self.processed, schema=self.schema)
+
+ def on_class_instance(self, *, node: ast.AST | ObjectNode, cls: Class, **kwargs: Any) -> None: # noqa: ARG002
+ """Detect and prepare Pydantic models."""
+ # Prevent running during static analysis.
+ if isinstance(node, ast.AST):
+ return
+
+ try:
+ import pydantic
+ except ImportError:
+ logger.warning("could not import pydantic - models will not be detected")
+ return
+
+ if issubclass(node.obj, pydantic.BaseModel):
+ self.in_model.append(cls)
+ dynamic.process_class(node, cls)
+ self.processed.add(cls.canonical_path)
+
+ def on_attribute_instance(self, *, node: ast.AST | ObjectNode, attr: Attribute, **kwargs: Any) -> None: # noqa: ARG002
+ """Handle Pydantic fields."""
+ # Prevent running during static analysis.
+ if isinstance(node, ast.AST):
+ return
+ if self.in_model:
+ cls = self.in_model[-1]
+ dynamic.process_attribute(node, attr, cls)
+ self.processed.add(attr.canonical_path)
+
+ def on_function_instance(self, *, node: ast.AST | ObjectNode, func: Function, **kwargs: Any) -> None: # noqa: ARG002
+ """Handle Pydantic field validators."""
+ # Prevent running during static analysis.
+ if isinstance(node, ast.AST):
+ return
+ if self.in_model:
+ cls = self.in_model[-1]
+ dynamic.process_function(node, func, cls)
+ self.processed.add(func.canonical_path)
+
+ def on_class_members(self, *, node: ast.AST | ObjectNode, cls: Class, **kwargs: Any) -> None: # noqa: ARG002
+ """Finalize the Pydantic model data."""
+ # Prevent running during static analysis.
+ if isinstance(node, ast.AST):
+ return
+
+ if self.in_model and cls is self.in_model[-1]:
+ # Pop last class from the heap.
+ self.in_model.pop()
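A minimal sketch of enabling the extension programmatically, assuming Griffe's public `load`/`load_extensions` API; mkdocstrings users would instead list it under the Python handler's `extensions` option in `mkdocs.yml`. The package name is a placeholder.

```python
import griffe

from griffe_pydantic import PydanticExtension

extensions = griffe.load_extensions(PydanticExtension(schema=True))
package = griffe.load("my_package", extensions=extensions)  # models get labeled and processed
```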
diff --git a/src/griffe_pydantic/py.typed b/src/griffe_pydantic/py.typed
new file mode 100644
index 0000000..e69de29
diff --git a/src/griffe_pydantic/static.py b/src/griffe_pydantic/static.py
new file mode 100644
index 0000000..d7fd046
--- /dev/null
+++ b/src/griffe_pydantic/static.py
@@ -0,0 +1,168 @@
+"""Griffe extension for Pydantic."""
+
+from __future__ import annotations
+
+import ast
+import sys
+from typing import TYPE_CHECKING
+
+from griffe import (
+ Alias,
+ Attribute,
+ Class,
+ Docstring,
+ Expr,
+ ExprCall,
+ ExprKeyword,
+ ExprName,
+ Function,
+ Module,
+ dynamic_import,
+ get_logger,
+)
+
+from griffe_pydantic import common
+
+if TYPE_CHECKING:
+ from pathlib import Path
+
+
+logger = get_logger(__name__)
+
+
+def inherits_pydantic(cls: Class) -> bool:
+ """Tell whether a class inherits from a Pydantic model.
+
+ Parameters:
+ cls: A Griffe class.
+
+ Returns:
+ True if the class inherits from a Pydantic model, False otherwise.
+ """
+ for base in cls.bases:
+ if isinstance(base, (ExprName, Expr)):
+ base = base.canonical_path # noqa: PLW2901
+ if base in {"pydantic.BaseModel", "pydantic.main.BaseModel"}:
+ return True
+
+ return any(inherits_pydantic(parent_class) for parent_class in cls.mro())
+
+
+def pydantic_field_validator(func: Function) -> ExprCall | None:
+ """Return a function's `pydantic.field_validator` decorator if it exists.
+
+ Parameters:
+ func: A Griffe function.
+
+ Returns:
+ The decorator value (a Griffe expression), or None if the function is not a field validator.
+ """
+ for decorator in func.decorators:
+ if isinstance(decorator.value, ExprCall) and decorator.callable_path == "pydantic.field_validator":
+ return decorator.value
+ return None
+
+
+def process_attribute(attr: Attribute, cls: Class, *, processed: set[str]) -> None:
+ """Handle Pydantic fields."""
+ if attr.canonical_path in processed:
+ return
+ processed.add(attr.canonical_path)
+
+ kwargs = {}
+ if isinstance(attr.value, ExprCall):
+ kwargs = {
+ argument.name: argument.value for argument in attr.value.arguments if isinstance(argument, ExprKeyword)
+ }
+
+ if (
+ attr.value.function.canonical_path == "pydantic.Field"
+ and len(attr.value.arguments) >= 1
+ and not isinstance(attr.value.arguments[0], ExprKeyword)
+ and attr.value.arguments[0] != "..." # handle Field(...), i.e. no default
+ ):
+ kwargs["default"] = attr.value.arguments[0]
+
+ elif attr.value is not None:
+ kwargs["default"] = attr.value
+
+ if attr.name == "model_config":
+ cls.extra[common.self_namespace]["config"] = kwargs
+ return
+
+ attr.labels = {"pydantic-field"}
+ attr.value = kwargs.get("default", None)
+ constraints = {kwarg: value for kwarg, value in kwargs.items() if kwarg not in {"default", "description"}}
+ attr.extra[common.self_namespace]["constraints"] = constraints
+
+ # Populate docstring from the field's `description` argument.
+ if not attr.docstring and (docstring := kwargs.get("description", None)):
+ attr.docstring = Docstring(ast.literal_eval(docstring), parent=attr) # type: ignore[arg-type]
+
+
+def process_function(func: Function, cls: Class, *, processed: set[str]) -> None:
+ """Handle Pydantic field validators."""
+ if func.canonical_path in processed:
+ return
+ processed.add(func.canonical_path)
+
+ if isinstance(func, Alias):
+ logger.warning(f"cannot yet process {func}")
+ return
+
+ if decorator := pydantic_field_validator(func):
+ fields = [ast.literal_eval(field) for field in decorator.arguments if isinstance(field, str)]
+ common.process_function(func, cls, fields)
+
+
+def process_class(cls: Class, *, processed: set[str], schema: bool = False) -> None:
+ """Finalize the Pydantic model data."""
+ if cls.canonical_path in processed:
+ return
+
+ if not inherits_pydantic(cls):
+ return
+
+ processed.add(cls.canonical_path)
+
+ common.process_class(cls)
+
+ if schema:
+ import_path: Path | list[Path] = cls.package.filepath
+ if isinstance(import_path, list):
+ import_path = import_path[-1]
+ if import_path.name == "__init__.py":
+ import_path = import_path.parent
+ import_path = import_path.parent
+ try:
+ true_class = dynamic_import(cls.path, import_paths=[import_path, *sys.path])
+ except ImportError:
+ logger.debug(f"Could not import class {cls.path} for JSON schema")
+ return
+ cls.extra[common.self_namespace]["schema"] = common.json_schema(true_class)
+
+ for member in cls.all_members.values():
+ if isinstance(member, Attribute):
+ process_attribute(member, cls, processed=processed)
+ elif isinstance(member, Function):
+ process_function(member, cls, processed=processed)
+ elif isinstance(member, Class):
+ process_class(member, processed=processed, schema=schema)
+
+
+def process_module(
+ mod: Module,
+ *,
+ processed: set[str],
+ schema: bool = False,
+) -> None:
+ """Handle Pydantic models in a module."""
+ if mod.canonical_path in processed:
+ return
+ processed.add(mod.canonical_path)
+
+ for cls in mod.classes.values():
+ process_class(cls, processed=processed, schema=schema)
+
+ for submodule in mod.modules.values():
+ process_module(submodule, processed=processed, schema=schema)
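For illustration, a hypothetical module the static pass can handle without importing it: `Field(...)` is parsed as an `ExprCall` whose `description` keyword feeds the docstring, and the string arguments of `field_validator` are literal-eval'd into field names.

```python
from pydantic import BaseModel, Field, field_validator


class Point(BaseModel):
    """A hypothetical model, for illustration only."""

    x: int = Field(0, description="Horizontal coordinate.")

    @field_validator("x")
    @classmethod
    def check_x(cls, value: int) -> int:
        return value
```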
diff --git a/src/griffe_pydantic/templates/material/_base/pydantic_model.html.jinja b/src/griffe_pydantic/templates/material/_base/pydantic_model.html.jinja
new file mode 100644
index 0000000..196c13d
--- /dev/null
+++ b/src/griffe_pydantic/templates/material/_base/pydantic_model.html.jinja
@@ -0,0 +1,65 @@
+{% extends "_base/class.html.jinja" %}
+
+{% block contents %}
+ {% block bases %}{{ super() }}{% endblock %}
+ {% block docstring %}{{ super() }}{% endblock %}
+
+ {% block schema scoped %}
+    {% if class.extra.griffe_pydantic.schema %}
+      {{ class.extra.griffe_pydantic.schema|highlight(language="json") }}
+    {% endif %}
+  {% endblock schema %}
+
+  {% block config scoped %}
+    {% if class.extra.griffe_pydantic.config %}
+      <p><b>Config:</b></p>
+      <ul>
+        {% for name, value in class.extra.griffe_pydantic.config.items() %}
+          <li>
+            <code>{{ name }}</code>: {{ value|highlight(language="python", inline=True) }}
+          </li>
+        {% endfor %}
+      </ul>
+    {% endif %}
+  {% endblock config %}
+
+  {% block fields scoped %}
+    {% if class.extra.griffe_pydantic.fields %}
+      <p><b>Fields:</b></p>
+      <ul>
+        {% for name, field in class.extra.griffe_pydantic.fields().items() %}
+          <li>
+            <code>{{ name }}</code>
+            {% with expression = field.annotation %}
+              (<code>{% include "expression.html.jinja" with context %}</code>)
+            {% endwith %}
+          </li>
+        {% endfor %}
+      </ul>
+    {% endif %}
+  {% endblock fields %}
+
+  {% block validators scoped %}
+    {% if class.extra.griffe_pydantic.validators %}
+      <p><b>Validators:</b></p>
+      <ul>
+        {% for name, validator in class.extra.griffe_pydantic.validators().items() %}
+          <li>
+            <code>{{ name }}</code> →
+            {% for target in validator.extra.griffe_pydantic.targets %}
+              <code>{{ target.name }}</code>
+              {%- if not loop.last %}, {% endif %}
+            {% endfor %}
+          </li>
+        {% endfor %}
+      </ul>
+    {% endif %}
+  {% endblock validators %}
+