diff --git a/README.md b/README.md index 47c9441..e67c360 100644 --- a/README.md +++ b/README.md @@ -1,5 +1,5 @@ # Honcho -![Static Badge](https://img.shields.io/badge/Version-0.0.2-blue) +![Static Badge](https://img.shields.io/badge/Version-0.0.3-blue) [![Discord](https://img.shields.io/discord/1016845111637839922?style=flat&logo=discord&logoColor=23ffffff&label=Plastic%20Labs&labelColor=235865F2)](https://discord.gg/plasticlabs) ![GitHub License](https://img.shields.io/github/license/plastic-labs/honcho) ![GitHub Repo stars](https://img.shields.io/github/stars/plastic-labs/honcho) @@ -50,14 +50,21 @@ poetry install # install dependencies connection_uri. For testing sqlite is fine. The below example uses an in-memory sqlite database. -> Honcho has been tested with Postgresql and SQLite +> Honcho has been tested with Postgresql and PGVector ```env -DATABASE_TYPE=sqlite -CONNECTION_URI=sqlite:// +DATABASE_TYPE=postgres +CONNECTION_URI=postgresql://testuser:testpwd@localhost:5432/honcho ``` -3. Run the API via uvicorn +3. Launch a postgres database with pgvector enabled using docker-compose + +```bash +cd honcho/api/local +docker-compose up -d +``` + +4. Run the API via uvicorn ```bash cd honcho/api # change to the api directory @@ -86,7 +93,7 @@ Docs](https://fly.io/docs/getting-started/) to setup your environment and the `flyctl`. 
-Once `flyctl` is set up use the the following commands to launch the application: +Once `flyctl` is set up use the following commands to launch the application: ```bash cd honcho/api diff --git a/api/.env.template b/api/.env.template index 3e73476..04ee33b 100644 --- a/api/.env.template +++ b/api/.env.template @@ -1,2 +1,7 @@ DATABASE_TYPE=sqlite CONNECTION_URI=sqlite:// + +DATABASE_TYPE=postgres +CONNECTION_URI=postgresql://testuser:testpwd@localhost:5432/honcho + +OPENAI_API_KEY= diff --git a/api/CHANGELOG.md b/api/CHANGELOG.md index 56d7915..84cd89e 100644 --- a/api/CHANGELOG.md +++ b/api/CHANGELOG.md @@ -4,6 +4,22 @@ All notable changes to this project will be documented in this file. The format is based on [Keep a Changelog](http://keepachangelog.com/) and this project adheres to [Semantic Versioning](http://semver.org/). +## [0.0.3] — 2024-02-15 + +### Added + +* Collections table to reference a collection of embedding documents +* Documents table to hold vector embeddings for RAG workflows +* Local scripts for running a postgres database with pgvector installed +* OpenAI Dependency for embedding models +* PGvector dependency for vector db support + +### Changed + +* session_data is now metadata +* session_data is a JSON field using a python `dict` for compatibility + + ## [0.0.2] — 2024-02-01 ### Added diff --git a/api/local/docker-compose.yml b/api/local/docker-compose.yml new file mode 100644 index 0000000..ea0f376 --- /dev/null +++ b/api/local/docker-compose.yml @@ -0,0 +1,14 @@ +services: + db: + hostname: db + image: ankane/pgvector + ports: + - 5432:5432 + restart: always + environment: + - POSTGRES_DB=honcho + - POSTGRES_USER=testuser + - POSTGRES_PASSWORD=testpwd + - POSTGRES_HOST_AUTH_METHOD=trust + volumes: + - ./init.sql:/docker-entrypoint-initdb.d/init.sql diff --git a/api/local/init.sql b/api/local/init.sql new file mode 100644 index 0000000..0aa0fc2 --- /dev/null +++ b/api/local/init.sql @@ -0,0 +1 @@ +CREATE EXTENSION IF NOT EXISTS vector; diff 
--git a/api/poetry.lock b/api/poetry.lock index 252389c..fe56dc9 100644 --- a/api/poetry.lock +++ b/api/poetry.lock @@ -37,6 +37,18 @@ doc = ["Sphinx", "packaging", "sphinx-autodoc-typehints (>=1.2.0)", "sphinx-rtd- test = ["anyio[trio]", "coverage[toml] (>=4.5)", "hypothesis (>=4.0)", "mock (>=4)", "psutil (>=5.9)", "pytest (>=7.0)", "pytest-mock (>=3.6.1)", "trustme", "uvloop (>=0.17)"] trio = ["trio (<0.22)"] +[[package]] +name = "certifi" +version = "2024.2.2" +description = "Python package for providing Mozilla's CA Bundle." +category = "main" +optional = false +python-versions = ">=3.6" +files = [ + {file = "certifi-2024.2.2-py3-none-any.whl", hash = "sha256:dc383c07b76109f368f6106eee2b593b04a011ea4d55f652c6ca24a754d1cdd1"}, + {file = "certifi-2024.2.2.tar.gz", hash = "sha256:0569859f95fc761b18b45ef421b1290a0f65f147e92a1e5eb3e635f9a5e4e66f"}, +] + [[package]] name = "click" version = "8.1.7" @@ -82,6 +94,18 @@ wrapt = ">=1.10,<2" [package.extras] dev = ["PyTest", "PyTest-Cov", "bump2version (<1)", "sphinx (<2)", "tox"] +[[package]] +name = "distro" +version = "1.9.0" +description = "Distro - an OS platform information API" +category = "main" +optional = false +python-versions = ">=3.6" +files = [ + {file = "distro-1.9.0-py3-none-any.whl", hash = "sha256:7bffd925d65168f85027d8da9af6bddab658135b840670a223589bc0c8ef02b2"}, + {file = "distro-1.9.0.tar.gz", hash = "sha256:2fa77c6fd8940f116ee1d6b94a2f90b13b5ea8d019b98bc8bafdcabcdd9bdbed"}, +] + [[package]] name = "exceptiongroup" version = "1.2.0" @@ -120,14 +144,14 @@ all = ["email-validator (>=2.0.0)", "httpx (>=0.23.0)", "itsdangerous (>=1.1.0)" [[package]] name = "fastapi-pagination" -version = "0.12.14" +version = "0.12.15" description = "FastAPI pagination" category = "main" optional = false python-versions = ">=3.8,<4.0" files = [ - {file = "fastapi_pagination-0.12.14-py3-none-any.whl", hash = "sha256:59b6c5626b1d21c610da333c7a586d625f6c81d8fa26267a4b598aae736f6753"}, - {file = 
"fastapi_pagination-0.12.14.tar.gz", hash = "sha256:4148694b1e170055eea0a5e691dbc640c4bf55eb0086cf11d14b164c35660559"}, + {file = "fastapi_pagination-0.12.15-py3-none-any.whl", hash = "sha256:bcfea8622b48135ef759b926d9d09fa8e16bc8adab26ec2b65d1647e72d39988"}, + {file = "fastapi_pagination-0.12.15.tar.gz", hash = "sha256:a7e5e48cd9d183f29532455a1689dfac575877b7ff10d112ddb56cb3d047a457"}, ] [package.dependencies] @@ -236,6 +260,53 @@ files = [ {file = "h11-0.14.0.tar.gz", hash = "sha256:8f19fbbe99e72420ff35c00b27a34cb9937e902a8b810e2c88300c6f0a3b699d"}, ] +[[package]] +name = "httpcore" +version = "1.0.3" +description = "A minimal low-level HTTP client." +category = "main" +optional = false +python-versions = ">=3.8" +files = [ + {file = "httpcore-1.0.3-py3-none-any.whl", hash = "sha256:9a6a501c3099307d9fd76ac244e08503427679b1e81ceb1d922485e2f2462ad2"}, + {file = "httpcore-1.0.3.tar.gz", hash = "sha256:5c0f9546ad17dac4d0772b0808856eb616eb8b48ce94f49ed819fd6982a8a544"}, +] + +[package.dependencies] +certifi = "*" +h11 = ">=0.13,<0.15" + +[package.extras] +asyncio = ["anyio (>=4.0,<5.0)"] +http2 = ["h2 (>=3,<5)"] +socks = ["socksio (>=1.0.0,<2.0.0)"] +trio = ["trio (>=0.22.0,<0.24.0)"] + +[[package]] +name = "httpx" +version = "0.26.0" +description = "The next generation HTTP client." 
+category = "main" +optional = false +python-versions = ">=3.8" +files = [ + {file = "httpx-0.26.0-py3-none-any.whl", hash = "sha256:8915f5a3627c4d47b73e8202457cb28f1266982d1159bd5779d86a80c0eab1cd"}, + {file = "httpx-0.26.0.tar.gz", hash = "sha256:451b55c30d5185ea6b23c2c793abf9bb237d2a7dfb901ced6ff69ad37ec1dfaf"}, +] + +[package.dependencies] +anyio = "*" +certifi = "*" +httpcore = ">=1.0.0,<2.0.0" +idna = "*" +sniffio = "*" + +[package.extras] +brotli = ["brotli", "brotlicffi"] +cli = ["click (>=8.0.0,<9.0.0)", "pygments (>=2.0.0,<3.0.0)", "rich (>=10,<14)"] +http2 = ["h2 (>=3,<5)"] +socks = ["socksio (>=1.0.0,<2.0.0)"] + [[package]] name = "idna" version = "3.6" @@ -269,14 +340,14 @@ testing = ["pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", [[package]] name = "limits" -version = "3.7.0" +version = "3.8.0" description = "Rate limiting utilities" category = "main" optional = false python-versions = ">=3.7" files = [ - {file = "limits-3.7.0-py3-none-any.whl", hash = "sha256:c528817b7fc15f3e86ad091ba3e40231f6430a91b753db864767684cda8a7f2e"}, - {file = "limits-3.7.0.tar.gz", hash = "sha256:124c6a04d2f4b20990fb1de019eec9474d6c1346c70d8fd0561609b86998b64a"}, + {file = "limits-3.8.0-py3-none-any.whl", hash = "sha256:6e3c75712359dfaea28bee23832bd814bbe66a42c92bbd848154dfba0d4c4503"}, + {file = "limits-3.8.0.tar.gz", hash = "sha256:7dd4955dec3c7a219be04e661251ae243a48050e84053bf68b31dd07890f28c2"}, ] [package.dependencies] @@ -297,6 +368,68 @@ mongodb = ["pymongo (>4.1,<5)"] redis = ["redis (>3,!=4.5.2,!=4.5.3,<6.0.0)"] rediscluster = ["redis (>=4.2.0,!=4.5.2,!=4.5.3)"] +[[package]] +name = "numpy" +version = "1.24.4" +description = "Fundamental package for array computing in Python" +category = "main" +optional = false +python-versions = ">=3.8" +files = [ + {file = "numpy-1.24.4-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:c0bfb52d2169d58c1cdb8cc1f16989101639b34c7d3ce60ed70b19c63eba0b64"}, + {file = 
"numpy-1.24.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:ed094d4f0c177b1b8e7aa9cba7d6ceed51c0e569a5318ac0ca9a090680a6a1b1"}, + {file = "numpy-1.24.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:79fc682a374c4a8ed08b331bef9c5f582585d1048fa6d80bc6c35bc384eee9b4"}, + {file = "numpy-1.24.4-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7ffe43c74893dbf38c2b0a1f5428760a1a9c98285553c89e12d70a96a7f3a4d6"}, + {file = "numpy-1.24.4-cp310-cp310-win32.whl", hash = "sha256:4c21decb6ea94057331e111a5bed9a79d335658c27ce2adb580fb4d54f2ad9bc"}, + {file = "numpy-1.24.4-cp310-cp310-win_amd64.whl", hash = "sha256:b4bea75e47d9586d31e892a7401f76e909712a0fd510f58f5337bea9572c571e"}, + {file = "numpy-1.24.4-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:f136bab9c2cfd8da131132c2cf6cc27331dd6fae65f95f69dcd4ae3c3639c810"}, + {file = "numpy-1.24.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:e2926dac25b313635e4d6cf4dc4e51c8c0ebfed60b801c799ffc4c32bf3d1254"}, + {file = "numpy-1.24.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:222e40d0e2548690405b0b3c7b21d1169117391c2e82c378467ef9ab4c8f0da7"}, + {file = "numpy-1.24.4-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7215847ce88a85ce39baf9e89070cb860c98fdddacbaa6c0da3ffb31b3350bd5"}, + {file = "numpy-1.24.4-cp311-cp311-win32.whl", hash = "sha256:4979217d7de511a8d57f4b4b5b2b965f707768440c17cb70fbf254c4b225238d"}, + {file = "numpy-1.24.4-cp311-cp311-win_amd64.whl", hash = "sha256:b7b1fc9864d7d39e28f41d089bfd6353cb5f27ecd9905348c24187a768c79694"}, + {file = "numpy-1.24.4-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:1452241c290f3e2a312c137a9999cdbf63f78864d63c79039bda65ee86943f61"}, + {file = "numpy-1.24.4-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:04640dab83f7c6c85abf9cd729c5b65f1ebd0ccf9de90b270cd61935eef0197f"}, + {file = "numpy-1.24.4-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", 
hash = "sha256:a5425b114831d1e77e4b5d812b69d11d962e104095a5b9c3b641a218abcc050e"}, + {file = "numpy-1.24.4-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dd80e219fd4c71fc3699fc1dadac5dcf4fd882bfc6f7ec53d30fa197b8ee22dc"}, + {file = "numpy-1.24.4-cp38-cp38-win32.whl", hash = "sha256:4602244f345453db537be5314d3983dbf5834a9701b7723ec28923e2889e0bb2"}, + {file = "numpy-1.24.4-cp38-cp38-win_amd64.whl", hash = "sha256:692f2e0f55794943c5bfff12b3f56f99af76f902fc47487bdfe97856de51a706"}, + {file = "numpy-1.24.4-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:2541312fbf09977f3b3ad449c4e5f4bb55d0dbf79226d7724211acc905049400"}, + {file = "numpy-1.24.4-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:9667575fb6d13c95f1b36aca12c5ee3356bf001b714fc354eb5465ce1609e62f"}, + {file = "numpy-1.24.4-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f3a86ed21e4f87050382c7bc96571755193c4c1392490744ac73d660e8f564a9"}, + {file = "numpy-1.24.4-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d11efb4dbecbdf22508d55e48d9c8384db795e1b7b51ea735289ff96613ff74d"}, + {file = "numpy-1.24.4-cp39-cp39-win32.whl", hash = "sha256:6620c0acd41dbcb368610bb2f4d83145674040025e5536954782467100aa8835"}, + {file = "numpy-1.24.4-cp39-cp39-win_amd64.whl", hash = "sha256:befe2bf740fd8373cf56149a5c23a0f601e82869598d41f8e188a0e9869926f8"}, + {file = "numpy-1.24.4-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:31f13e25b4e304632a4619d0e0777662c2ffea99fcae2029556b17d8ff958aef"}, + {file = "numpy-1.24.4-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:95f7ac6540e95bc440ad77f56e520da5bf877f87dca58bd095288dce8940532a"}, + {file = "numpy-1.24.4-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:e98f220aa76ca2a977fe435f5b04d7b3470c0a2e6312907b37ba6068f26787f2"}, + {file = "numpy-1.24.4.tar.gz", hash = "sha256:80f5e3a4e498641401868df4208b74581206afbee7cf7b8329daae82676d9463"}, +] + +[[package]] +name = 
"openai" +version = "1.12.0" +description = "The official Python library for the openai API" +category = "main" +optional = false +python-versions = ">=3.7.1" +files = [ + {file = "openai-1.12.0-py3-none-any.whl", hash = "sha256:a54002c814e05222e413664f651b5916714e4700d041d5cf5724d3ae1a3e3481"}, + {file = "openai-1.12.0.tar.gz", hash = "sha256:99c5d257d09ea6533d689d1cc77caa0ac679fa21efef8893d8b0832a86877f1b"}, +] + +[package.dependencies] +anyio = ">=3.5.0,<5" +distro = ">=1.7.0,<2" +httpx = ">=0.23.0,<1" +pydantic = ">=1.9.0,<3" +sniffio = "*" +tqdm = ">4" +typing-extensions = ">=4.7,<5" + +[package.extras] +datalib = ["numpy (>=1)", "pandas (>=1.2.3)", "pandas-stubs (>=1.1.0.11)"] + [[package]] name = "packaging" version = "23.2" @@ -309,6 +442,20 @@ files = [ {file = "packaging-23.2.tar.gz", hash = "sha256:048fb0e9405036518eaaf48a55953c750c11e1a1b68e0dd1a9d62ed0c092cfc5"}, ] +[[package]] +name = "pgvector" +version = "0.2.5" +description = "pgvector support for Python" +category = "main" +optional = false +python-versions = ">=3.8" +files = [ + {file = "pgvector-0.2.5-py2.py3-none-any.whl", hash = "sha256:5e5e93ec4d3c45ab1fa388729d56c602f6966296e19deee8878928c6d567e41b"}, +] + +[package.dependencies] +numpy = "*" + [[package]] name = "psycopg2-binary" version = "2.9.9" @@ -393,19 +540,19 @@ files = [ [[package]] name = "pydantic" -version = "2.5.2" +version = "2.6.1" description = "Data validation using Python type hints" category = "main" optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "pydantic-2.5.2-py3-none-any.whl", hash = "sha256:80c50fb8e3dcecfddae1adbcc00ec5822918490c99ab31f6cf6140ca1c1429f0"}, - {file = "pydantic-2.5.2.tar.gz", hash = "sha256:ff177ba64c6faf73d7afa2e8cad38fd456c0dbe01c9954e71038001cd15a6edd"}, + {file = "pydantic-2.6.1-py3-none-any.whl", hash = "sha256:0b6a909df3192245cb736509a92ff69e4fef76116feffec68e93a567347bae6f"}, + {file = "pydantic-2.6.1.tar.gz", hash = 
"sha256:4fd5c182a2488dc63e6d32737ff19937888001e2a6d86e94b3f233104a5d1fa9"}, ] [package.dependencies] annotated-types = ">=0.4.0" -pydantic-core = "2.14.5" +pydantic-core = "2.16.2" typing-extensions = ">=4.6.1" [package.extras] @@ -413,117 +560,91 @@ email = ["email-validator (>=2.0.0)"] [[package]] name = "pydantic-core" -version = "2.14.5" +version = "2.16.2" description = "" category = "main" optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "pydantic_core-2.14.5-cp310-cp310-macosx_10_7_x86_64.whl", hash = "sha256:7e88f5696153dc516ba6e79f82cc4747e87027205f0e02390c21f7cb3bd8abfd"}, - {file = "pydantic_core-2.14.5-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:4641e8ad4efb697f38a9b64ca0523b557c7931c5f84e0fd377a9a3b05121f0de"}, - {file = "pydantic_core-2.14.5-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:774de879d212db5ce02dfbf5b0da9a0ea386aeba12b0b95674a4ce0593df3d07"}, - {file = "pydantic_core-2.14.5-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:ebb4e035e28f49b6f1a7032920bb9a0c064aedbbabe52c543343d39341a5b2a3"}, - {file = "pydantic_core-2.14.5-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b53e9ad053cd064f7e473a5f29b37fc4cc9dc6d35f341e6afc0155ea257fc911"}, - {file = "pydantic_core-2.14.5-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8aa1768c151cf562a9992462239dfc356b3d1037cc5a3ac829bb7f3bda7cc1f9"}, - {file = "pydantic_core-2.14.5-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:eac5c82fc632c599f4639a5886f96867ffced74458c7db61bc9a66ccb8ee3113"}, - {file = "pydantic_core-2.14.5-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:d2ae91f50ccc5810b2f1b6b858257c9ad2e08da70bf890dee02de1775a387c66"}, - {file = "pydantic_core-2.14.5-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:6b9ff467ffbab9110e80e8c8de3bcfce8e8b0fd5661ac44a09ae5901668ba997"}, - {file = 
"pydantic_core-2.14.5-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:61ea96a78378e3bd5a0be99b0e5ed00057b71f66115f5404d0dae4819f495093"}, - {file = "pydantic_core-2.14.5-cp310-none-win32.whl", hash = "sha256:bb4c2eda937a5e74c38a41b33d8c77220380a388d689bcdb9b187cf6224c9720"}, - {file = "pydantic_core-2.14.5-cp310-none-win_amd64.whl", hash = "sha256:b7851992faf25eac90bfcb7bfd19e1f5ffa00afd57daec8a0042e63c74a4551b"}, - {file = "pydantic_core-2.14.5-cp311-cp311-macosx_10_7_x86_64.whl", hash = "sha256:4e40f2bd0d57dac3feb3a3aed50f17d83436c9e6b09b16af271b6230a2915459"}, - {file = "pydantic_core-2.14.5-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:ab1cdb0f14dc161ebc268c09db04d2c9e6f70027f3b42446fa11c153521c0e88"}, - {file = "pydantic_core-2.14.5-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:aae7ea3a1c5bb40c93cad361b3e869b180ac174656120c42b9fadebf685d121b"}, - {file = "pydantic_core-2.14.5-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:60b7607753ba62cf0739177913b858140f11b8af72f22860c28eabb2f0a61937"}, - {file = "pydantic_core-2.14.5-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2248485b0322c75aee7565d95ad0e16f1c67403a470d02f94da7344184be770f"}, - {file = "pydantic_core-2.14.5-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:823fcc638f67035137a5cd3f1584a4542d35a951c3cc68c6ead1df7dac825c26"}, - {file = "pydantic_core-2.14.5-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:96581cfefa9123accc465a5fd0cc833ac4d75d55cc30b633b402e00e7ced00a6"}, - {file = "pydantic_core-2.14.5-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:a33324437018bf6ba1bb0f921788788641439e0ed654b233285b9c69704c27b4"}, - {file = "pydantic_core-2.14.5-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:9bd18fee0923ca10f9a3ff67d4851c9d3e22b7bc63d1eddc12f439f436f2aada"}, - {file = 
"pydantic_core-2.14.5-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:853a2295c00f1d4429db4c0fb9475958543ee80cfd310814b5c0ef502de24dda"}, - {file = "pydantic_core-2.14.5-cp311-none-win32.whl", hash = "sha256:cb774298da62aea5c80a89bd58c40205ab4c2abf4834453b5de207d59d2e1651"}, - {file = "pydantic_core-2.14.5-cp311-none-win_amd64.whl", hash = "sha256:e87fc540c6cac7f29ede02e0f989d4233f88ad439c5cdee56f693cc9c1c78077"}, - {file = "pydantic_core-2.14.5-cp311-none-win_arm64.whl", hash = "sha256:57d52fa717ff445cb0a5ab5237db502e6be50809b43a596fb569630c665abddf"}, - {file = "pydantic_core-2.14.5-cp312-cp312-macosx_10_7_x86_64.whl", hash = "sha256:e60f112ac88db9261ad3a52032ea46388378034f3279c643499edb982536a093"}, - {file = "pydantic_core-2.14.5-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:6e227c40c02fd873c2a73a98c1280c10315cbebe26734c196ef4514776120aeb"}, - {file = "pydantic_core-2.14.5-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f0cbc7fff06a90bbd875cc201f94ef0ee3929dfbd5c55a06674b60857b8b85ed"}, - {file = "pydantic_core-2.14.5-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:103ef8d5b58596a731b690112819501ba1db7a36f4ee99f7892c40da02c3e189"}, - {file = "pydantic_core-2.14.5-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c949f04ecad823f81b1ba94e7d189d9dfb81edbb94ed3f8acfce41e682e48cef"}, - {file = "pydantic_core-2.14.5-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c1452a1acdf914d194159439eb21e56b89aa903f2e1c65c60b9d874f9b950e5d"}, - {file = "pydantic_core-2.14.5-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cb4679d4c2b089e5ef89756bc73e1926745e995d76e11925e3e96a76d5fa51fc"}, - {file = "pydantic_core-2.14.5-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:cf9d3fe53b1ee360e2421be95e62ca9b3296bf3f2fb2d3b83ca49ad3f925835e"}, - {file = "pydantic_core-2.14.5-cp312-cp312-musllinux_1_1_aarch64.whl", hash = 
"sha256:70f4b4851dbb500129681d04cc955be2a90b2248d69273a787dda120d5cf1f69"}, - {file = "pydantic_core-2.14.5-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:59986de5710ad9613ff61dd9b02bdd2f615f1a7052304b79cc8fa2eb4e336d2d"}, - {file = "pydantic_core-2.14.5-cp312-none-win32.whl", hash = "sha256:699156034181e2ce106c89ddb4b6504c30db8caa86e0c30de47b3e0654543260"}, - {file = "pydantic_core-2.14.5-cp312-none-win_amd64.whl", hash = "sha256:5baab5455c7a538ac7e8bf1feec4278a66436197592a9bed538160a2e7d11e36"}, - {file = "pydantic_core-2.14.5-cp312-none-win_arm64.whl", hash = "sha256:e47e9a08bcc04d20975b6434cc50bf82665fbc751bcce739d04a3120428f3e27"}, - {file = "pydantic_core-2.14.5-cp37-cp37m-macosx_10_7_x86_64.whl", hash = "sha256:af36f36538418f3806048f3b242a1777e2540ff9efaa667c27da63d2749dbce0"}, - {file = "pydantic_core-2.14.5-cp37-cp37m-macosx_11_0_arm64.whl", hash = "sha256:45e95333b8418ded64745f14574aa9bfc212cb4fbeed7a687b0c6e53b5e188cd"}, - {file = "pydantic_core-2.14.5-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4e47a76848f92529879ecfc417ff88a2806438f57be4a6a8bf2961e8f9ca9ec7"}, - {file = "pydantic_core-2.14.5-cp37-cp37m-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:d81e6987b27bc7d101c8597e1cd2bcaa2fee5e8e0f356735c7ed34368c471550"}, - {file = "pydantic_core-2.14.5-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:34708cc82c330e303f4ce87758828ef6e457681b58ce0e921b6e97937dd1e2a3"}, - {file = "pydantic_core-2.14.5-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:652c1988019752138b974c28f43751528116bcceadad85f33a258869e641d753"}, - {file = "pydantic_core-2.14.5-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6e4d090e73e0725b2904fdbdd8d73b8802ddd691ef9254577b708d413bf3006e"}, - {file = "pydantic_core-2.14.5-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:5c7d5b5005f177764e96bd584d7bf28d6e26e96f2a541fdddb934c486e36fd59"}, - 
{file = "pydantic_core-2.14.5-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:a71891847f0a73b1b9eb86d089baee301477abef45f7eaf303495cd1473613e4"}, - {file = "pydantic_core-2.14.5-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:a717aef6971208f0851a2420b075338e33083111d92041157bbe0e2713b37325"}, - {file = "pydantic_core-2.14.5-cp37-none-win32.whl", hash = "sha256:de790a3b5aa2124b8b78ae5faa033937a72da8efe74b9231698b5a1dd9be3405"}, - {file = "pydantic_core-2.14.5-cp37-none-win_amd64.whl", hash = "sha256:6c327e9cd849b564b234da821236e6bcbe4f359a42ee05050dc79d8ed2a91588"}, - {file = "pydantic_core-2.14.5-cp38-cp38-macosx_10_7_x86_64.whl", hash = "sha256:ef98ca7d5995a82f43ec0ab39c4caf6a9b994cb0b53648ff61716370eadc43cf"}, - {file = "pydantic_core-2.14.5-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:c6eae413494a1c3f89055da7a5515f32e05ebc1a234c27674a6956755fb2236f"}, - {file = "pydantic_core-2.14.5-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dcf4e6d85614f7a4956c2de5a56531f44efb973d2fe4a444d7251df5d5c4dcfd"}, - {file = "pydantic_core-2.14.5-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:6637560562134b0e17de333d18e69e312e0458ee4455bdad12c37100b7cad706"}, - {file = "pydantic_core-2.14.5-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:77fa384d8e118b3077cccfcaf91bf83c31fe4dc850b5e6ee3dc14dc3d61bdba1"}, - {file = "pydantic_core-2.14.5-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:16e29bad40bcf97aac682a58861249ca9dcc57c3f6be22f506501833ddb8939c"}, - {file = "pydantic_core-2.14.5-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:531f4b4252fac6ca476fbe0e6f60f16f5b65d3e6b583bc4d87645e4e5ddde331"}, - {file = "pydantic_core-2.14.5-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:074f3d86f081ce61414d2dc44901f4f83617329c6f3ab49d2bc6c96948b2c26b"}, - {file = "pydantic_core-2.14.5-cp38-cp38-musllinux_1_1_aarch64.whl", hash = 
"sha256:c2adbe22ab4babbca99c75c5d07aaf74f43c3195384ec07ccbd2f9e3bddaecec"}, - {file = "pydantic_core-2.14.5-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:0f6116a558fd06d1b7c2902d1c4cf64a5bd49d67c3540e61eccca93f41418124"}, - {file = "pydantic_core-2.14.5-cp38-none-win32.whl", hash = "sha256:fe0a5a1025eb797752136ac8b4fa21aa891e3d74fd340f864ff982d649691867"}, - {file = "pydantic_core-2.14.5-cp38-none-win_amd64.whl", hash = "sha256:079206491c435b60778cf2b0ee5fd645e61ffd6e70c47806c9ed51fc75af078d"}, - {file = "pydantic_core-2.14.5-cp39-cp39-macosx_10_7_x86_64.whl", hash = "sha256:a6a16f4a527aae4f49c875da3cdc9508ac7eef26e7977952608610104244e1b7"}, - {file = "pydantic_core-2.14.5-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:abf058be9517dc877227ec3223f0300034bd0e9f53aebd63cf4456c8cb1e0863"}, - {file = "pydantic_core-2.14.5-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:49b08aae5013640a3bfa25a8eebbd95638ec3f4b2eaf6ed82cf0c7047133f03b"}, - {file = "pydantic_core-2.14.5-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:c2d97e906b4ff36eb464d52a3bc7d720bd6261f64bc4bcdbcd2c557c02081ed2"}, - {file = "pydantic_core-2.14.5-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3128e0bbc8c091ec4375a1828d6118bc20404883169ac95ffa8d983b293611e6"}, - {file = "pydantic_core-2.14.5-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:88e74ab0cdd84ad0614e2750f903bb0d610cc8af2cc17f72c28163acfcf372a4"}, - {file = "pydantic_core-2.14.5-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c339dabd8ee15f8259ee0f202679b6324926e5bc9e9a40bf981ce77c038553db"}, - {file = "pydantic_core-2.14.5-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:3387277f1bf659caf1724e1afe8ee7dbc9952a82d90f858ebb931880216ea955"}, - {file = "pydantic_core-2.14.5-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:ba6b6b3846cfc10fdb4c971980a954e49d447cd215ed5a77ec8190bc93dd7bc5"}, - 
{file = "pydantic_core-2.14.5-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:ca61d858e4107ce5e1330a74724fe757fc7135190eb5ce5c9d0191729f033209"}, - {file = "pydantic_core-2.14.5-cp39-none-win32.whl", hash = "sha256:ec1e72d6412f7126eb7b2e3bfca42b15e6e389e1bc88ea0069d0cc1742f477c6"}, - {file = "pydantic_core-2.14.5-cp39-none-win_amd64.whl", hash = "sha256:c0b97ec434041827935044bbbe52b03d6018c2897349670ff8fe11ed24d1d4ab"}, - {file = "pydantic_core-2.14.5-pp310-pypy310_pp73-macosx_10_7_x86_64.whl", hash = "sha256:79e0a2cdbdc7af3f4aee3210b1172ab53d7ddb6a2d8c24119b5706e622b346d0"}, - {file = "pydantic_core-2.14.5-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:678265f7b14e138d9a541ddabbe033012a2953315739f8cfa6d754cc8063e8ca"}, - {file = "pydantic_core-2.14.5-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:95b15e855ae44f0c6341ceb74df61b606e11f1087e87dcb7482377374aac6abe"}, - {file = "pydantic_core-2.14.5-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:09b0e985fbaf13e6b06a56d21694d12ebca6ce5414b9211edf6f17738d82b0f8"}, - {file = "pydantic_core-2.14.5-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:3ad873900297bb36e4b6b3f7029d88ff9829ecdc15d5cf20161775ce12306f8a"}, - {file = "pydantic_core-2.14.5-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:2d0ae0d8670164e10accbeb31d5ad45adb71292032d0fdb9079912907f0085f4"}, - {file = "pydantic_core-2.14.5-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:d37f8ec982ead9ba0a22a996129594938138a1503237b87318392a48882d50b7"}, - {file = "pydantic_core-2.14.5-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:35613015f0ba7e14c29ac6c2483a657ec740e5ac5758d993fdd5870b07a61d8b"}, - {file = "pydantic_core-2.14.5-pp37-pypy37_pp73-macosx_10_7_x86_64.whl", hash = "sha256:ab4ea451082e684198636565224bbb179575efc1658c48281b2c866bfd4ddf04"}, - {file = 
"pydantic_core-2.14.5-pp37-pypy37_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4ce601907e99ea5b4adb807ded3570ea62186b17f88e271569144e8cca4409c7"}, - {file = "pydantic_core-2.14.5-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fb2ed8b3fe4bf4506d6dab3b93b83bbc22237e230cba03866d561c3577517d18"}, - {file = "pydantic_core-2.14.5-pp37-pypy37_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:70f947628e074bb2526ba1b151cee10e4c3b9670af4dbb4d73bc8a89445916b5"}, - {file = "pydantic_core-2.14.5-pp37-pypy37_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:4bc536201426451f06f044dfbf341c09f540b4ebdb9fd8d2c6164d733de5e634"}, - {file = "pydantic_core-2.14.5-pp37-pypy37_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:f4791cf0f8c3104ac668797d8c514afb3431bc3305f5638add0ba1a5a37e0d88"}, - {file = "pydantic_core-2.14.5-pp38-pypy38_pp73-macosx_10_7_x86_64.whl", hash = "sha256:038c9f763e650712b899f983076ce783175397c848da04985658e7628cbe873b"}, - {file = "pydantic_core-2.14.5-pp38-pypy38_pp73-macosx_11_0_arm64.whl", hash = "sha256:27548e16c79702f1e03f5628589c6057c9ae17c95b4c449de3c66b589ead0520"}, - {file = "pydantic_core-2.14.5-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c97bee68898f3f4344eb02fec316db93d9700fb1e6a5b760ffa20d71d9a46ce3"}, - {file = "pydantic_core-2.14.5-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b9b759b77f5337b4ea024f03abc6464c9f35d9718de01cfe6bae9f2e139c397e"}, - {file = "pydantic_core-2.14.5-pp38-pypy38_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:439c9afe34638ace43a49bf72d201e0ffc1a800295bed8420c2a9ca8d5e3dbb3"}, - {file = "pydantic_core-2.14.5-pp38-pypy38_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:ba39688799094c75ea8a16a6b544eb57b5b0f3328697084f3f2790892510d144"}, - {file = "pydantic_core-2.14.5-pp38-pypy38_pp73-musllinux_1_1_x86_64.whl", hash = 
"sha256:ccd4d5702bb90b84df13bd491be8d900b92016c5a455b7e14630ad7449eb03f8"}, - {file = "pydantic_core-2.14.5-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:81982d78a45d1e5396819bbb4ece1fadfe5f079335dd28c4ab3427cd95389944"}, - {file = "pydantic_core-2.14.5-pp39-pypy39_pp73-macosx_10_7_x86_64.whl", hash = "sha256:7f8210297b04e53bc3da35db08b7302a6a1f4889c79173af69b72ec9754796b8"}, - {file = "pydantic_core-2.14.5-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:8c8a8812fe6f43a3a5b054af6ac2d7b8605c7bcab2804a8a7d68b53f3cd86e00"}, - {file = "pydantic_core-2.14.5-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:206ed23aecd67c71daf5c02c3cd19c0501b01ef3cbf7782db9e4e051426b3d0d"}, - {file = "pydantic_core-2.14.5-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c2027d05c8aebe61d898d4cffd774840a9cb82ed356ba47a90d99ad768f39789"}, - {file = "pydantic_core-2.14.5-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:40180930807ce806aa71eda5a5a5447abb6b6a3c0b4b3b1b1962651906484d68"}, - {file = "pydantic_core-2.14.5-pp39-pypy39_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:615a0a4bff11c45eb3c1996ceed5bdaa2f7b432425253a7c2eed33bb86d80abc"}, - {file = "pydantic_core-2.14.5-pp39-pypy39_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:f5e412d717366e0677ef767eac93566582518fe8be923361a5c204c1a62eaafe"}, - {file = "pydantic_core-2.14.5-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:513b07e99c0a267b1d954243845d8a833758a6726a3b5d8948306e3fe14675e3"}, - {file = "pydantic_core-2.14.5.tar.gz", hash = "sha256:6d30226dfc816dd0fdf120cae611dd2215117e4f9b124af8c60ab9093b6e8e71"}, + {file = "pydantic_core-2.16.2-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:3fab4e75b8c525a4776e7630b9ee48aea50107fea6ca9f593c98da3f4d11bf7c"}, + {file = "pydantic_core-2.16.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:8bde5b48c65b8e807409e6f20baee5d2cd880e0fad00b1a811ebc43e39a00ab2"}, + {file = 
"pydantic_core-2.16.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2924b89b16420712e9bb8192396026a8fbd6d8726224f918353ac19c4c043d2a"}, + {file = "pydantic_core-2.16.2-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:16aa02e7a0f539098e215fc193c8926c897175d64c7926d00a36188917717a05"}, + {file = "pydantic_core-2.16.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:936a787f83db1f2115ee829dd615c4f684ee48ac4de5779ab4300994d8af325b"}, + {file = "pydantic_core-2.16.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:459d6be6134ce3b38e0ef76f8a672924460c455d45f1ad8fdade36796df1ddc8"}, + {file = "pydantic_core-2.16.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4f9ee4febb249c591d07b2d4dd36ebcad0ccd128962aaa1801508320896575ef"}, + {file = "pydantic_core-2.16.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:40a0bd0bed96dae5712dab2aba7d334a6c67cbcac2ddfca7dbcc4a8176445990"}, + {file = "pydantic_core-2.16.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:870dbfa94de9b8866b37b867a2cb37a60c401d9deb4a9ea392abf11a1f98037b"}, + {file = "pydantic_core-2.16.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:308974fdf98046db28440eb3377abba274808bf66262e042c412eb2adf852731"}, + {file = "pydantic_core-2.16.2-cp310-none-win32.whl", hash = "sha256:a477932664d9611d7a0816cc3c0eb1f8856f8a42435488280dfbf4395e141485"}, + {file = "pydantic_core-2.16.2-cp310-none-win_amd64.whl", hash = "sha256:8f9142a6ed83d90c94a3efd7af8873bf7cefed2d3d44387bf848888482e2d25f"}, + {file = "pydantic_core-2.16.2-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:406fac1d09edc613020ce9cf3f2ccf1a1b2f57ab00552b4c18e3d5276c67eb11"}, + {file = "pydantic_core-2.16.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:ce232a6170dd6532096cadbf6185271e4e8c70fc9217ebe105923ac105da9978"}, + {file = 
"pydantic_core-2.16.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a90fec23b4b05a09ad988e7a4f4e081711a90eb2a55b9c984d8b74597599180f"}, + {file = "pydantic_core-2.16.2-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:8aafeedb6597a163a9c9727d8a8bd363a93277701b7bfd2749fbefee2396469e"}, + {file = "pydantic_core-2.16.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9957433c3a1b67bdd4c63717eaf174ebb749510d5ea612cd4e83f2d9142f3fc8"}, + {file = "pydantic_core-2.16.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b0d7a9165167269758145756db43a133608a531b1e5bb6a626b9ee24bc38a8f7"}, + {file = "pydantic_core-2.16.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dffaf740fe2e147fedcb6b561353a16243e654f7fe8e701b1b9db148242e1272"}, + {file = "pydantic_core-2.16.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:f8ed79883b4328b7f0bd142733d99c8e6b22703e908ec63d930b06be3a0e7113"}, + {file = "pydantic_core-2.16.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:cf903310a34e14651c9de056fcc12ce090560864d5a2bb0174b971685684e1d8"}, + {file = "pydantic_core-2.16.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:46b0d5520dbcafea9a8645a8164658777686c5c524d381d983317d29687cce97"}, + {file = "pydantic_core-2.16.2-cp311-none-win32.whl", hash = "sha256:70651ff6e663428cea902dac297066d5c6e5423fda345a4ca62430575364d62b"}, + {file = "pydantic_core-2.16.2-cp311-none-win_amd64.whl", hash = "sha256:98dc6f4f2095fc7ad277782a7c2c88296badcad92316b5a6e530930b1d475ebc"}, + {file = "pydantic_core-2.16.2-cp311-none-win_arm64.whl", hash = "sha256:ef6113cd31411eaf9b39fc5a8848e71c72656fd418882488598758b2c8c6dfa0"}, + {file = "pydantic_core-2.16.2-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:88646cae28eb1dd5cd1e09605680c2b043b64d7481cdad7f5003ebef401a3039"}, + {file = "pydantic_core-2.16.2-cp312-cp312-macosx_11_0_arm64.whl", hash = 
"sha256:7b883af50eaa6bb3299780651e5be921e88050ccf00e3e583b1e92020333304b"}, + {file = "pydantic_core-2.16.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7bf26c2e2ea59d32807081ad51968133af3025c4ba5753e6a794683d2c91bf6e"}, + {file = "pydantic_core-2.16.2-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:99af961d72ac731aae2a1b55ccbdae0733d816f8bfb97b41909e143de735f522"}, + {file = "pydantic_core-2.16.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:02906e7306cb8c5901a1feb61f9ab5e5c690dbbeaa04d84c1b9ae2a01ebe9379"}, + {file = "pydantic_core-2.16.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d5362d099c244a2d2f9659fb3c9db7c735f0004765bbe06b99be69fbd87c3f15"}, + {file = "pydantic_core-2.16.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3ac426704840877a285d03a445e162eb258924f014e2f074e209d9b4ff7bf380"}, + {file = "pydantic_core-2.16.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:b94cbda27267423411c928208e89adddf2ea5dd5f74b9528513f0358bba019cb"}, + {file = "pydantic_core-2.16.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:6db58c22ac6c81aeac33912fb1af0e930bc9774166cdd56eade913d5f2fff35e"}, + {file = "pydantic_core-2.16.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:396fdf88b1b503c9c59c84a08b6833ec0c3b5ad1a83230252a9e17b7dfb4cffc"}, + {file = "pydantic_core-2.16.2-cp312-none-win32.whl", hash = "sha256:7c31669e0c8cc68400ef0c730c3a1e11317ba76b892deeefaf52dcb41d56ed5d"}, + {file = "pydantic_core-2.16.2-cp312-none-win_amd64.whl", hash = "sha256:a3b7352b48fbc8b446b75f3069124e87f599d25afb8baa96a550256c031bb890"}, + {file = "pydantic_core-2.16.2-cp312-none-win_arm64.whl", hash = "sha256:a9e523474998fb33f7c1a4d55f5504c908d57add624599e095c20fa575b8d943"}, + {file = "pydantic_core-2.16.2-cp38-cp38-macosx_10_12_x86_64.whl", hash = 
"sha256:ae34418b6b389d601b31153b84dce480351a352e0bb763684a1b993d6be30f17"}, + {file = "pydantic_core-2.16.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:732bd062c9e5d9582a30e8751461c1917dd1ccbdd6cafb032f02c86b20d2e7ec"}, + {file = "pydantic_core-2.16.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e4b52776a2e3230f4854907a1e0946eec04d41b1fc64069ee774876bbe0eab55"}, + {file = "pydantic_core-2.16.2-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:ef551c053692b1e39e3f7950ce2296536728871110e7d75c4e7753fb30ca87f4"}, + {file = "pydantic_core-2.16.2-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ebb892ed8599b23fa8f1799e13a12c87a97a6c9d0f497525ce9858564c4575a4"}, + {file = "pydantic_core-2.16.2-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:aa6c8c582036275997a733427b88031a32ffa5dfc3124dc25a730658c47a572f"}, + {file = "pydantic_core-2.16.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e4ba0884a91f1aecce75202473ab138724aa4fb26d7707f2e1fa6c3e68c84fbf"}, + {file = "pydantic_core-2.16.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:7924e54f7ce5d253d6160090ddc6df25ed2feea25bfb3339b424a9dd591688bc"}, + {file = "pydantic_core-2.16.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:69a7b96b59322a81c2203be537957313b07dd333105b73db0b69212c7d867b4b"}, + {file = "pydantic_core-2.16.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:7e6231aa5bdacda78e96ad7b07d0c312f34ba35d717115f4b4bff6cb87224f0f"}, + {file = "pydantic_core-2.16.2-cp38-none-win32.whl", hash = "sha256:41dac3b9fce187a25c6253ec79a3f9e2a7e761eb08690e90415069ea4a68ff7a"}, + {file = "pydantic_core-2.16.2-cp38-none-win_amd64.whl", hash = "sha256:f685dbc1fdadb1dcd5b5e51e0a378d4685a891b2ddaf8e2bba89bd3a7144e44a"}, + {file = "pydantic_core-2.16.2-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:55749f745ebf154c0d63d46c8c58594d8894b161928aa41adbb0709c1fe78b77"}, + 
{file = "pydantic_core-2.16.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:b30b0dd58a4509c3bd7eefddf6338565c4905406aee0c6e4a5293841411a1286"}, + {file = "pydantic_core-2.16.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:18de31781cdc7e7b28678df7c2d7882f9692ad060bc6ee3c94eb15a5d733f8f7"}, + {file = "pydantic_core-2.16.2-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:5864b0242f74b9dd0b78fd39db1768bc3f00d1ffc14e596fd3e3f2ce43436a33"}, + {file = "pydantic_core-2.16.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b8f9186ca45aee030dc8234118b9c0784ad91a0bb27fc4e7d9d6608a5e3d386c"}, + {file = "pydantic_core-2.16.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:cc6f6c9be0ab6da37bc77c2dda5f14b1d532d5dbef00311ee6e13357a418e646"}, + {file = "pydantic_core-2.16.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:aa057095f621dad24a1e906747179a69780ef45cc8f69e97463692adbcdae878"}, + {file = "pydantic_core-2.16.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:6ad84731a26bcfb299f9eab56c7932d46f9cad51c52768cace09e92a19e4cf55"}, + {file = "pydantic_core-2.16.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:3b052c753c4babf2d1edc034c97851f867c87d6f3ea63a12e2700f159f5c41c3"}, + {file = "pydantic_core-2.16.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:e0f686549e32ccdb02ae6f25eee40cc33900910085de6aa3790effd391ae10c2"}, + {file = "pydantic_core-2.16.2-cp39-none-win32.whl", hash = "sha256:7afb844041e707ac9ad9acad2188a90bffce2c770e6dc2318be0c9916aef1469"}, + {file = "pydantic_core-2.16.2-cp39-none-win_amd64.whl", hash = "sha256:9da90d393a8227d717c19f5397688a38635afec89f2e2d7af0df037f3249c39a"}, + {file = "pydantic_core-2.16.2-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:5f60f920691a620b03082692c378661947d09415743e437a7478c309eb0e4f82"}, + {file = 
"pydantic_core-2.16.2-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:47924039e785a04d4a4fa49455e51b4eb3422d6eaacfde9fc9abf8fdef164e8a"}, + {file = "pydantic_core-2.16.2-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e6294e76b0380bb7a61eb8a39273c40b20beb35e8c87ee101062834ced19c545"}, + {file = "pydantic_core-2.16.2-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fe56851c3f1d6f5384b3051c536cc81b3a93a73faf931f404fef95217cf1e10d"}, + {file = "pydantic_core-2.16.2-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:9d776d30cde7e541b8180103c3f294ef7c1862fd45d81738d156d00551005784"}, + {file = "pydantic_core-2.16.2-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:72f7919af5de5ecfaf1eba47bf9a5d8aa089a3340277276e5636d16ee97614d7"}, + {file = "pydantic_core-2.16.2-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:4bfcbde6e06c56b30668a0c872d75a7ef3025dc3c1823a13cf29a0e9b33f67e8"}, + {file = "pydantic_core-2.16.2-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:ff7c97eb7a29aba230389a2661edf2e9e06ce616c7e35aa764879b6894a44b25"}, + {file = "pydantic_core-2.16.2-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:9b5f13857da99325dcabe1cc4e9e6a3d7b2e2c726248ba5dd4be3e8e4a0b6d0e"}, + {file = "pydantic_core-2.16.2-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:a7e41e3ada4cca5f22b478c08e973c930e5e6c7ba3588fb8e35f2398cdcc1545"}, + {file = "pydantic_core-2.16.2-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:60eb8ceaa40a41540b9acae6ae7c1f0a67d233c40dc4359c256ad2ad85bdf5e5"}, + {file = "pydantic_core-2.16.2-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7beec26729d496a12fd23cf8da9944ee338c8b8a17035a560b585c36fe81af20"}, + {file = "pydantic_core-2.16.2-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = 
"sha256:22c5f022799f3cd6741e24f0443ead92ef42be93ffda0d29b2597208c94c3753"}, + {file = "pydantic_core-2.16.2-pp39-pypy39_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:eca58e319f4fd6df004762419612122b2c7e7d95ffafc37e890252f869f3fb2a"}, + {file = "pydantic_core-2.16.2-pp39-pypy39_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:ed957db4c33bc99895f3a1672eca7e80e8cda8bd1e29a80536b4ec2153fa9804"}, + {file = "pydantic_core-2.16.2-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:459c0d338cc55d099798618f714b21b7ece17eb1a87879f2da20a3ff4c7628e2"}, + {file = "pydantic_core-2.16.2.tar.gz", hash = "sha256:0ba503850d8b8dcc18391f10de896ae51d37fe5fe43dbfb6a35c5c5cad271a06"}, ] [package.dependencies] @@ -531,14 +652,14 @@ typing-extensions = ">=4.6.0,<4.7.0 || >4.7.0" [[package]] name = "python-dotenv" -version = "1.0.0" +version = "1.0.1" description = "Read key-value pairs from a .env file and set them as environment variables" category = "main" optional = false python-versions = ">=3.8" files = [ - {file = "python-dotenv-1.0.0.tar.gz", hash = "sha256:a8df96034aae6d2d50a4ebe8216326c61c3eb64836776504fcca410e5937a3ba"}, - {file = "python_dotenv-1.0.0-py3-none-any.whl", hash = "sha256:f5971a9226b701070a4bf2c38c89e5a3f0d64de8debda981d1db98583009122a"}, + {file = "python-dotenv-1.0.1.tar.gz", hash = "sha256:e324ee90a023d808f1959c46bcbc04446a10ced277783dc6ee09987c37ec10ca"}, + {file = "python_dotenv-1.0.1-py3-none-any.whl", hash = "sha256:f7b63ef50f1b690dddf550d03497b66d609393b40b564ed0d674909a68ebf16a"}, ] [package.extras] @@ -546,14 +667,14 @@ cli = ["click (>=5.0)"] [[package]] name = "slowapi" -version = "0.1.8" +version = "0.1.9" description = "A rate limiting extension for Starlette and Fastapi" category = "main" optional = false python-versions = ">=3.7,<4.0" files = [ - {file = "slowapi-0.1.8-py3-none-any.whl", hash = "sha256:629fc415575bbffcd9d8621cc3ce326a78402c5f9b7b50b127979118d485c72e"}, - {file = "slowapi-0.1.8.tar.gz", hash = 
"sha256:8cc268f5a7e3624efa3f7bd2859b895f9f2376c4ed4e0378dd2f7f3343ca608e"}, + {file = "slowapi-0.1.9-py3-none-any.whl", hash = "sha256:cfad116cfb84ad9d763ee155c1e5c5cbf00b0d47399a769b227865f5df576e36"}, + {file = "slowapi-0.1.9.tar.gz", hash = "sha256:639192d0f1ca01b1c6d95bf6c71d794c3a9ee189855337b4821f7f457dddad77"}, ] [package.dependencies] @@ -576,61 +697,61 @@ files = [ [[package]] name = "sqlalchemy" -version = "2.0.25" +version = "2.0.27" description = "Database Abstraction Library" category = "main" optional = false python-versions = ">=3.7" files = [ - {file = "SQLAlchemy-2.0.25-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:4344d059265cc8b1b1be351bfb88749294b87a8b2bbe21dfbe066c4199541ebd"}, - {file = "SQLAlchemy-2.0.25-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:6f9e2e59cbcc6ba1488404aad43de005d05ca56e069477b33ff74e91b6319735"}, - {file = "SQLAlchemy-2.0.25-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:84daa0a2055df9ca0f148a64fdde12ac635e30edbca80e87df9b3aaf419e144a"}, - {file = "SQLAlchemy-2.0.25-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bc8b7dabe8e67c4832891a5d322cec6d44ef02f432b4588390017f5cec186a84"}, - {file = "SQLAlchemy-2.0.25-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:f5693145220517b5f42393e07a6898acdfe820e136c98663b971906120549da5"}, - {file = "SQLAlchemy-2.0.25-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:db854730a25db7c956423bb9fb4bdd1216c839a689bf9cc15fada0a7fb2f4570"}, - {file = "SQLAlchemy-2.0.25-cp310-cp310-win32.whl", hash = "sha256:14a6f68e8fc96e5e8f5647ef6cda6250c780612a573d99e4d881581432ef1669"}, - {file = "SQLAlchemy-2.0.25-cp310-cp310-win_amd64.whl", hash = "sha256:87f6e732bccd7dcf1741c00f1ecf33797383128bd1c90144ac8adc02cbb98643"}, - {file = "SQLAlchemy-2.0.25-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:342d365988ba88ada8af320d43df4e0b13a694dbd75951f537b2d5e4cb5cd002"}, - {file = 
"SQLAlchemy-2.0.25-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:f37c0caf14b9e9b9e8f6dbc81bc56db06acb4363eba5a633167781a48ef036ed"}, - {file = "SQLAlchemy-2.0.25-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:aa9373708763ef46782d10e950b49d0235bfe58facebd76917d3f5cbf5971aed"}, - {file = "SQLAlchemy-2.0.25-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d24f571990c05f6b36a396218f251f3e0dda916e0c687ef6fdca5072743208f5"}, - {file = "SQLAlchemy-2.0.25-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:75432b5b14dc2fff43c50435e248b45c7cdadef73388e5610852b95280ffd0e9"}, - {file = "SQLAlchemy-2.0.25-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:884272dcd3ad97f47702965a0e902b540541890f468d24bd1d98bcfe41c3f018"}, - {file = "SQLAlchemy-2.0.25-cp311-cp311-win32.whl", hash = "sha256:e607cdd99cbf9bb80391f54446b86e16eea6ad309361942bf88318bcd452363c"}, - {file = "SQLAlchemy-2.0.25-cp311-cp311-win_amd64.whl", hash = "sha256:7d505815ac340568fd03f719446a589162d55c52f08abd77ba8964fbb7eb5b5f"}, - {file = "SQLAlchemy-2.0.25-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:0dacf67aee53b16f365c589ce72e766efaabd2b145f9de7c917777b575e3659d"}, - {file = "SQLAlchemy-2.0.25-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:b801154027107461ee992ff4b5c09aa7cc6ec91ddfe50d02bca344918c3265c6"}, - {file = "SQLAlchemy-2.0.25-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:59a21853f5daeb50412d459cfb13cb82c089ad4c04ec208cd14dddd99fc23b39"}, - {file = "SQLAlchemy-2.0.25-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:29049e2c299b5ace92cbed0c1610a7a236f3baf4c6b66eb9547c01179f638ec5"}, - {file = "SQLAlchemy-2.0.25-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:b64b183d610b424a160b0d4d880995e935208fc043d0302dd29fee32d1ee3f95"}, - {file = "SQLAlchemy-2.0.25-cp312-cp312-musllinux_1_1_x86_64.whl", hash = 
"sha256:4f7a7d7fcc675d3d85fbf3b3828ecd5990b8d61bd6de3f1b260080b3beccf215"}, - {file = "SQLAlchemy-2.0.25-cp312-cp312-win32.whl", hash = "sha256:cf18ff7fc9941b8fc23437cc3e68ed4ebeff3599eec6ef5eebf305f3d2e9a7c2"}, - {file = "SQLAlchemy-2.0.25-cp312-cp312-win_amd64.whl", hash = "sha256:91f7d9d1c4dd1f4f6e092874c128c11165eafcf7c963128f79e28f8445de82d5"}, - {file = "SQLAlchemy-2.0.25-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:bb209a73b8307f8fe4fe46f6ad5979649be01607f11af1eb94aa9e8a3aaf77f0"}, - {file = "SQLAlchemy-2.0.25-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:798f717ae7c806d67145f6ae94dc7c342d3222d3b9a311a784f371a4333212c7"}, - {file = "SQLAlchemy-2.0.25-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5fdd402169aa00df3142149940b3bf9ce7dde075928c1886d9a1df63d4b8de62"}, - {file = "SQLAlchemy-2.0.25-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:0d3cab3076af2e4aa5693f89622bef7fa770c6fec967143e4da7508b3dceb9b9"}, - {file = "SQLAlchemy-2.0.25-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:74b080c897563f81062b74e44f5a72fa44c2b373741a9ade701d5f789a10ba23"}, - {file = "SQLAlchemy-2.0.25-cp37-cp37m-win32.whl", hash = "sha256:87d91043ea0dc65ee583026cb18e1b458d8ec5fc0a93637126b5fc0bc3ea68c4"}, - {file = "SQLAlchemy-2.0.25-cp37-cp37m-win_amd64.whl", hash = "sha256:75f99202324383d613ddd1f7455ac908dca9c2dd729ec8584c9541dd41822a2c"}, - {file = "SQLAlchemy-2.0.25-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:420362338681eec03f53467804541a854617faed7272fe71a1bfdb07336a381e"}, - {file = "SQLAlchemy-2.0.25-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:7c88f0c7dcc5f99bdb34b4fd9b69b93c89f893f454f40219fe923a3a2fd11625"}, - {file = "SQLAlchemy-2.0.25-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a3be4987e3ee9d9a380b66393b77a4cd6d742480c951a1c56a23c335caca4ce3"}, - {file = "SQLAlchemy-2.0.25-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:f2a159111a0f58fb034c93eeba211b4141137ec4b0a6e75789ab7a3ef3c7e7e3"}, - {file = "SQLAlchemy-2.0.25-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:8b8cb63d3ea63b29074dcd29da4dc6a97ad1349151f2d2949495418fd6e48db9"}, - {file = "SQLAlchemy-2.0.25-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:736ea78cd06de6c21ecba7416499e7236a22374561493b456a1f7ffbe3f6cdb4"}, - {file = "SQLAlchemy-2.0.25-cp38-cp38-win32.whl", hash = "sha256:10331f129982a19df4284ceac6fe87353ca3ca6b4ca77ff7d697209ae0a5915e"}, - {file = "SQLAlchemy-2.0.25-cp38-cp38-win_amd64.whl", hash = "sha256:c55731c116806836a5d678a70c84cb13f2cedba920212ba7dcad53260997666d"}, - {file = "SQLAlchemy-2.0.25-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:605b6b059f4b57b277f75ace81cc5bc6335efcbcc4ccb9066695e515dbdb3900"}, - {file = "SQLAlchemy-2.0.25-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:665f0a3954635b5b777a55111ababf44b4fc12b1f3ba0a435b602b6387ffd7cf"}, - {file = "SQLAlchemy-2.0.25-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ecf6d4cda1f9f6cb0b45803a01ea7f034e2f1aed9475e883410812d9f9e3cfcf"}, - {file = "SQLAlchemy-2.0.25-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c51db269513917394faec5e5c00d6f83829742ba62e2ac4fa5c98d58be91662f"}, - {file = "SQLAlchemy-2.0.25-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:790f533fa5c8901a62b6fef5811d48980adeb2f51f1290ade8b5e7ba990ba3de"}, - {file = "SQLAlchemy-2.0.25-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:1b1180cda6df7af84fe72e4530f192231b1f29a7496951db4ff38dac1687202d"}, - {file = "SQLAlchemy-2.0.25-cp39-cp39-win32.whl", hash = "sha256:555651adbb503ac7f4cb35834c5e4ae0819aab2cd24857a123370764dc7d7e24"}, - {file = "SQLAlchemy-2.0.25-cp39-cp39-win_amd64.whl", hash = "sha256:dc55990143cbd853a5d038c05e79284baedf3e299661389654551bd02a6a68d7"}, - {file = "SQLAlchemy-2.0.25-py3-none-any.whl", hash = "sha256:a86b4240e67d4753dc3092d9511886795b3c2852abe599cffe108952f7af7ac3"}, - {file = 
"SQLAlchemy-2.0.25.tar.gz", hash = "sha256:a2c69a7664fb2d54b8682dd774c3b54f67f84fa123cf84dda2a5f40dcaa04e08"}, + {file = "SQLAlchemy-2.0.27-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d04e579e911562f1055d26dab1868d3e0bb905db3bccf664ee8ad109f035618a"}, + {file = "SQLAlchemy-2.0.27-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:fa67d821c1fd268a5a87922ef4940442513b4e6c377553506b9db3b83beebbd8"}, + {file = "SQLAlchemy-2.0.27-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6c7a596d0be71b7baa037f4ac10d5e057d276f65a9a611c46970f012752ebf2d"}, + {file = "SQLAlchemy-2.0.27-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:954d9735ee9c3fa74874c830d089a815b7b48df6f6b6e357a74130e478dbd951"}, + {file = "SQLAlchemy-2.0.27-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:5cd20f58c29bbf2680039ff9f569fa6d21453fbd2fa84dbdb4092f006424c2e6"}, + {file = "SQLAlchemy-2.0.27-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:03f448ffb731b48323bda68bcc93152f751436ad6037f18a42b7e16af9e91c07"}, + {file = "SQLAlchemy-2.0.27-cp310-cp310-win32.whl", hash = "sha256:d997c5938a08b5e172c30583ba6b8aad657ed9901fc24caf3a7152eeccb2f1b4"}, + {file = "SQLAlchemy-2.0.27-cp310-cp310-win_amd64.whl", hash = "sha256:eb15ef40b833f5b2f19eeae65d65e191f039e71790dd565c2af2a3783f72262f"}, + {file = "SQLAlchemy-2.0.27-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:6c5bad7c60a392850d2f0fee8f355953abaec878c483dd7c3836e0089f046bf6"}, + {file = "SQLAlchemy-2.0.27-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:a3012ab65ea42de1be81fff5fb28d6db893ef978950afc8130ba707179b4284a"}, + {file = "SQLAlchemy-2.0.27-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dbcd77c4d94b23e0753c5ed8deba8c69f331d4fd83f68bfc9db58bc8983f49cd"}, + {file = "SQLAlchemy-2.0.27-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d177b7e82f6dd5e1aebd24d9c3297c70ce09cd1d5d37b43e53f39514379c029c"}, + {file = 
"SQLAlchemy-2.0.27-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:680b9a36029b30cf063698755d277885d4a0eab70a2c7c6e71aab601323cba45"}, + {file = "SQLAlchemy-2.0.27-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:1306102f6d9e625cebaca3d4c9c8f10588735ef877f0360b5cdb4fdfd3fd7131"}, + {file = "SQLAlchemy-2.0.27-cp311-cp311-win32.whl", hash = "sha256:5b78aa9f4f68212248aaf8943d84c0ff0f74efc65a661c2fc68b82d498311fd5"}, + {file = "SQLAlchemy-2.0.27-cp311-cp311-win_amd64.whl", hash = "sha256:15e19a84b84528f52a68143439d0c7a3a69befcd4f50b8ef9b7b69d2628ae7c4"}, + {file = "SQLAlchemy-2.0.27-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:0de1263aac858f288a80b2071990f02082c51d88335a1db0d589237a3435fe71"}, + {file = "SQLAlchemy-2.0.27-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:ce850db091bf7d2a1f2fdb615220b968aeff3849007b1204bf6e3e50a57b3d32"}, + {file = "SQLAlchemy-2.0.27-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8dfc936870507da96aebb43e664ae3a71a7b96278382bcfe84d277b88e379b18"}, + {file = "SQLAlchemy-2.0.27-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c4fbe6a766301f2e8a4519f4500fe74ef0a8509a59e07a4085458f26228cd7cc"}, + {file = "SQLAlchemy-2.0.27-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:4535c49d961fe9a77392e3a630a626af5baa967172d42732b7a43496c8b28876"}, + {file = "SQLAlchemy-2.0.27-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:0fb3bffc0ced37e5aa4ac2416f56d6d858f46d4da70c09bb731a246e70bff4d5"}, + {file = "SQLAlchemy-2.0.27-cp312-cp312-win32.whl", hash = "sha256:7f470327d06400a0aa7926b375b8e8c3c31d335e0884f509fe272b3c700a7254"}, + {file = "SQLAlchemy-2.0.27-cp312-cp312-win_amd64.whl", hash = "sha256:f9374e270e2553653d710ece397df67db9d19c60d2647bcd35bfc616f1622dcd"}, + {file = "SQLAlchemy-2.0.27-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:e97cf143d74a7a5a0f143aa34039b4fecf11343eed66538610debc438685db4a"}, + {file = 
"SQLAlchemy-2.0.27-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d7b5a3e2120982b8b6bd1d5d99e3025339f7fb8b8267551c679afb39e9c7c7f1"}, + {file = "SQLAlchemy-2.0.27-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e36aa62b765cf9f43a003233a8c2d7ffdeb55bc62eaa0a0380475b228663a38f"}, + {file = "SQLAlchemy-2.0.27-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:5ada0438f5b74c3952d916c199367c29ee4d6858edff18eab783b3978d0db16d"}, + {file = "SQLAlchemy-2.0.27-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:b1d9d1bfd96eef3c3faedb73f486c89e44e64e40e5bfec304ee163de01cf996f"}, + {file = "SQLAlchemy-2.0.27-cp37-cp37m-win32.whl", hash = "sha256:ca891af9f3289d24a490a5fde664ea04fe2f4984cd97e26de7442a4251bd4b7c"}, + {file = "SQLAlchemy-2.0.27-cp37-cp37m-win_amd64.whl", hash = "sha256:fd8aafda7cdff03b905d4426b714601c0978725a19efc39f5f207b86d188ba01"}, + {file = "SQLAlchemy-2.0.27-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:ec1f5a328464daf7a1e4e385e4f5652dd9b1d12405075ccba1df842f7774b4fc"}, + {file = "SQLAlchemy-2.0.27-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:ad862295ad3f644e3c2c0d8b10a988e1600d3123ecb48702d2c0f26771f1c396"}, + {file = "SQLAlchemy-2.0.27-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:48217be1de7d29a5600b5c513f3f7664b21d32e596d69582be0a94e36b8309cb"}, + {file = "SQLAlchemy-2.0.27-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9e56afce6431450442f3ab5973156289bd5ec33dd618941283847c9fd5ff06bf"}, + {file = "SQLAlchemy-2.0.27-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:611068511b5531304137bcd7fe8117c985d1b828eb86043bd944cebb7fae3910"}, + {file = "SQLAlchemy-2.0.27-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:b86abba762ecfeea359112b2bb4490802b340850bbee1948f785141a5e020de8"}, + {file = "SQLAlchemy-2.0.27-cp38-cp38-win32.whl", hash = "sha256:30d81cc1192dc693d49d5671cd40cdec596b885b0ce3b72f323888ab1c3863d5"}, + {file = 
"SQLAlchemy-2.0.27-cp38-cp38-win_amd64.whl", hash = "sha256:120af1e49d614d2525ac247f6123841589b029c318b9afbfc9e2b70e22e1827d"}, + {file = "SQLAlchemy-2.0.27-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:d07ee7793f2aeb9b80ec8ceb96bc8cc08a2aec8a1b152da1955d64e4825fcbac"}, + {file = "SQLAlchemy-2.0.27-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:cb0845e934647232b6ff5150df37ceffd0b67b754b9fdbb095233deebcddbd4a"}, + {file = "SQLAlchemy-2.0.27-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1fc19ae2e07a067663dd24fca55f8ed06a288384f0e6e3910420bf4b1270cc51"}, + {file = "SQLAlchemy-2.0.27-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b90053be91973a6fb6020a6e44382c97739736a5a9d74e08cc29b196639eb979"}, + {file = "SQLAlchemy-2.0.27-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:2f5c9dfb0b9ab5e3a8a00249534bdd838d943ec4cfb9abe176a6c33408430230"}, + {file = "SQLAlchemy-2.0.27-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:33e8bde8fff203de50399b9039c4e14e42d4d227759155c21f8da4a47fc8053c"}, + {file = "SQLAlchemy-2.0.27-cp39-cp39-win32.whl", hash = "sha256:d873c21b356bfaf1589b89090a4011e6532582b3a8ea568a00e0c3aab09399dd"}, + {file = "SQLAlchemy-2.0.27-cp39-cp39-win_amd64.whl", hash = "sha256:ff2f1b7c963961d41403b650842dc2039175b906ab2093635d8319bef0b7d620"}, + {file = "SQLAlchemy-2.0.27-py3-none-any.whl", hash = "sha256:1ab4e0448018d01b142c916cc7119ca573803a4745cfe341b8f95657812700ac"}, + {file = "SQLAlchemy-2.0.27.tar.gz", hash = "sha256:86a6ed69a71fe6b88bf9331594fa390a2adda4a49b5c06f98e47bf0d392534f8"}, ] [package.dependencies] @@ -681,6 +802,27 @@ typing-extensions = {version = ">=3.10.0", markers = "python_version < \"3.10\"" [package.extras] full = ["httpx (>=0.22.0)", "itsdangerous", "jinja2", "python-multipart", "pyyaml"] +[[package]] +name = "tqdm" +version = "4.66.2" +description = "Fast, Extensible Progress Meter" +category = "main" +optional = false +python-versions = ">=3.7" +files = [ + {file = 
"tqdm-4.66.2-py3-none-any.whl", hash = "sha256:1ee4f8a893eb9bef51c6e35730cebf234d5d0b6bd112b0271e10ed7c24a02bd9"}, + {file = "tqdm-4.66.2.tar.gz", hash = "sha256:6cd52cdf0fef0e0f543299cfc96fec90d7b8a7e88745f411ec33eb44d5ed3531"}, +] + +[package.dependencies] +colorama = {version = "*", markers = "platform_system == \"Windows\""} + +[package.extras] +dev = ["pytest (>=6)", "pytest-cov", "pytest-timeout", "pytest-xdist"] +notebook = ["ipywidgets (>=6)"] +slack = ["slack-sdk"] +telegram = ["requests"] + [[package]] name = "typing-extensions" version = "4.9.0" @@ -812,4 +954,4 @@ testing = ["big-O", "jaraco.functools", "jaraco.itertools", "more-itertools", "p [metadata] lock-version = "2.0" python-versions = "^3.8" -content-hash = "115cde0c7dc1de7906b4f17bbdaced3f98a969c33c3b85976a1dc5b0aeece3e2" +content-hash = "90a0874f29e706994647a141418ed4eca5bd621518396d525d27039ad586e4bc" diff --git a/api/pyproject.toml b/api/pyproject.toml index 6777d84..6034c3f 100644 --- a/api/pyproject.toml +++ b/api/pyproject.toml @@ -1,6 +1,6 @@ [tool.poetry] name = "honcho" -version = "0.0.2" +version = "0.0.3" description = "Honcho Server" authors = ["Plastic Labs "] readme = "README.md" @@ -14,6 +14,8 @@ sqlalchemy = "^2.0.25" psycopg2-binary = "^2.9.9" slowapi = "^0.1.8" fastapi-pagination = "^0.12.14" +pgvector = "^0.2.5" +openai = "^1.12.0" [build-system] diff --git a/api/src/crud.py b/api/src/crud.py index a130892..9b8fa0c 100644 --- a/api/src/crud.py +++ b/api/src/crud.py @@ -1,12 +1,16 @@ -import json import uuid -from typing import Optional +import datetime +from typing import Optional, Sequence + +from openai import OpenAI from sqlalchemy import select, Select from sqlalchemy.orm import Session +from sqlalchemy.exc import IntegrityError from . 
import models, schemas +openai_client = OpenAI() def get_session(db: Session, app_id: str, session_id: uuid.UUID, user_id: Optional[str] = None) -> Optional[models.Session]: stmt = select(models.Session).where(models.Session.app_id == app_id).where(models.Session.id == session_id) @@ -14,8 +18,6 @@ def get_session(db: Session, app_id: str, session_id: uuid.UUID, user_id: Option stmt = stmt.where(models.Session.user_id == user_id) session = db.scalars(stmt).one_or_none() return session - # return db.query(models.Session).filter(models.Session.id == session_id).first() - def get_sessions( db: Session, app_id: str, user_id: str, location_id: str | None = None @@ -32,16 +34,15 @@ def get_sessions( stmt = stmt.where(models.Session.location_id == location_id) return stmt - # return db.scalars(stmt).all() def create_session( - db: Session, app_id: str, user_id: str, session: schemas.SessionCreate + db: Session, session: schemas.SessionCreate, app_id: str, user_id: str ) -> models.Session: honcho_session = models.Session( app_id=app_id, user_id=user_id, location_id=session.location_id, - session_data=json.dumps(session.session_data), + h_metadata=session.metadata, ) db.add(honcho_session) db.commit() @@ -49,16 +50,18 @@ def create_session( return honcho_session -def update_session(db: Session, app_id: str, user_id: str, session_id: uuid.UUID, session_data: dict) -> bool: +def update_session( + db: Session, session: schemas.SessionUpdate, app_id: str, user_id: str, session_id: uuid.UUID +) -> bool: honcho_session = get_session(db, app_id=app_id, session_id=session_id, user_id=user_id) if honcho_session is None: raise ValueError("Session not found or does not belong to user") - honcho_session.session_data = json.dumps(session_data) + if session.metadata is not None: # Need to explicitly be there won't make it empty by default + honcho_session.h_metadata = session.metadata db.commit() db.refresh(honcho_session) return honcho_session - def delete_session(db: Session, app_id: 
str, user_id: str, session_id: uuid.UUID) -> bool: stmt = ( select(models.Session) @@ -73,7 +76,6 @@ def delete_session(db: Session, app_id: str, user_id: str, session_id: uuid.UUID db.commit() return True - def create_message( db: Session, message: schemas.MessageCreate, app_id: str, user_id: str, session_id: uuid.UUID ) -> models.Message: @@ -91,13 +93,9 @@ def create_message( db.refresh(honcho_message) return honcho_message - def get_messages( db: Session, app_id: str, user_id: str, session_id: uuid.UUID ) -> Select: - # session = get_session(db, app_id=app_id, session_id=session_id, user_id=user_id) - # if session is None: - # raise ValueError("Session not found or does not belong to user") stmt = ( select(models.Message) .join(models.Session, models.Session.id == models.Message.session_id) @@ -107,19 +105,10 @@ def get_messages( .order_by(models.Message.created_at) ) return stmt - # return db.scalars(stmt).all() - # return ( - # db.query(models.Message) - # .filter(models.Message.session_id == session_id) - # .all() - # ) def get_message( db: Session, app_id: str, user_id: str, session_id: uuid.UUID, message_id: uuid.UUID ) -> Optional[models.Message]: - # session = get_session(db, app_id=app_id, session_id=session_id, user_id=user_id) - # if session is None: - # raise ValueError("Session not found or does not belong to user") stmt = ( select(models.Message) .join(models.Session, models.Session.id == models.Message.session_id) @@ -131,6 +120,9 @@ def get_message( ) return db.scalars(stmt).one_or_none() +######################################################## +# metamessage methods +######################################################## def get_metamessages(db: Session, app_id: str, user_id: str, session_id: uuid.UUID, message_id: Optional[uuid.UUID], metamessage_type: Optional[str] = None) -> Select: stmt = ( @@ -185,3 +177,210 @@ def create_metamessage( db.commit() db.refresh(honcho_metamessage) return honcho_metamessage + 
+######################################################## +# collection methods +######################################################## + +# Should be very similar to the session methods + +def get_collections(db: Session, app_id: str, user_id: str) -> Select: + """Get a distinct list of the names of collections associated with a user""" + stmt = ( + select(models.Collection) + .where(models.Collection.app_id == app_id) + .where(models.Collection.user_id == user_id) + .order_by(models.Collection.created_at) + ) + return stmt + +def get_collection_by_id(db: Session, app_id: str, user_id: str, collection_id: uuid.UUID) -> Optional[models.Collection]: + stmt = ( + select(models.Collection) + .where(models.Collection.app_id == app_id) + .where(models.Collection.user_id == user_id) + .where(models.Collection.id == collection_id) + ) + collection = db.scalars(stmt).one_or_none() + return collection + +def get_collection_by_name(db: Session, app_id: str, user_id: str, name: str) -> Optional[models.Collection]: + stmt = ( + select(models.Collection) + .where(models.Collection.app_id == app_id) + .where(models.Collection.user_id == user_id) + .where(models.Collection.name == name) + ) + collection = db.scalars(stmt).one_or_none() + return collection + +def create_collection( + db: Session, collection: schemas.CollectionCreate, app_id: str, user_id: str +) -> models.Collection: + honcho_collection = models.Collection( + app_id=app_id, + user_id=user_id, + name=collection.name, + ) + try: + db.add(honcho_collection) + db.commit() + except IntegrityError: + db.rollback() + raise ValueError("Collection already exists") + db.refresh(honcho_collection) + return honcho_collection + +def update_collection( + db: Session, collection: schemas.CollectionUpdate, app_id: str, user_id: str, collection_id: uuid.UUID +) -> models.Collection: + honcho_collection = get_collection_by_id(db, app_id=app_id, user_id=user_id, collection_id=collection_id) + if honcho_collection is None: + raise 
ValueError("collection not found or does not belong to user") + try: + honcho_collection.name = collection.name + db.commit() + except IntegrityError: + db.rollback() + raise ValueError("Collection already exists") + db.refresh(honcho_collection) + return honcho_collection + +def delete_collection( + db: Session, app_id: str, user_id: str, collection_id: uuid.UUID +) -> bool: + """ + Delete a Collection and all documents associated with it. Takes advantage of + the orm cascade feature + """ + stmt = ( + select(models.Collection) + .where(models.Collection.id == collection_id) + .where(models.Collection.app_id == app_id) + .where(models.Collection.user_id == user_id) + ) + honcho_collection = db.scalars(stmt).one_or_none() + if honcho_collection is None: + return False + db.delete(honcho_collection) + db.commit() + return True + +######################################################## +# document methods +######################################################## + +# Should be similar to the messages methods outside of query + +def get_documents( + db: Session, app_id: str, user_id: str, collection_id: uuid.UUID +) -> Select: + stmt = ( + select(models.Document) + .join(models.Collection, models.Collection.id == models.Document.collection_id) + .where(models.Collection.app_id == app_id) + .where(models.Collection.user_id == user_id) + .where(models.Document.collection_id == collection_id) + .order_by(models.Document.created_at) + ) + return stmt + +def get_document( + db: Session, app_id: str, user_id: str, collection_id: uuid.UUID, document_id: uuid.UUID +) -> Optional[models.Document]: + stmt = ( + select(models.Document) + .join(models.Collection, models.Collection.id == models.Document.collection_id) + .where(models.Collection.app_id == app_id) + .where(models.Collection.user_id == user_id) + .where(models.Document.collection_id == collection_id) + .where(models.Document.id == document_id) + ) + + document = db.scalars(stmt).one_or_none() + return document + + 
+def query_documents(db: Session, app_id: str, user_id: str, collection_id: uuid.UUID, query: str, top_k: int = 5) -> Sequence[models.Document]: + response = openai_client.embeddings.create( + input=query, + model="text-embedding-3-small" + ) + embedding_query = response.data[0].embedding + stmt = ( + select(models.Document) + .join(models.Collection, models.Collection.id == models.Document.collection_id) + .where(models.Collection.app_id == app_id) + .where(models.Collection.user_id == user_id) + .where(models.Document.collection_id == collection_id) + .order_by(models.Document.embedding.cosine_distance(embedding_query)) + .limit(top_k) + ) + # if metadata is not None: + # stmt = stmt.where(models.Document.h_metadata.contains(metadata)) + return db.scalars(stmt).all() + +def create_document( + db: Session, document: schemas.DocumentCreate, app_id: str, user_id: str, collection_id: uuid.UUID +) -> models.Document: + """Embed a message as a vector and create a document""" + collection = get_collection_by_id(db, app_id=app_id, collection_id=collection_id, user_id=user_id) + if collection is None: + raise ValueError("Session not found or does not belong to user") + + response = openai_client.embeddings.create( + input=document.content, + model="text-embedding-3-small" + ) + + embedding = response.data[0].embedding + + honcho_document = models.Document( + collection_id=collection_id, + content=document.content, + h_metadata=document.metadata, + embedding=embedding + ) + db.add(honcho_document) + db.commit() + db.refresh(honcho_document) + return honcho_document + +def update_document( + db: Session, document: schemas.DocumentUpdate, app_id: str, user_id: str, collection_id: uuid.UUID, document_id: uuid.UUID +) -> bool: + honcho_document = get_document(db, app_id=app_id, collection_id=collection_id, user_id=user_id, document_id=document_id) + if honcho_document is None: + raise ValueError("Session not found or does not belong to user") + if document.content is not None: 
+ honcho_document.content = document.content + response = openai_client.embeddings.create( + input=document.content, + model="text-embedding-3-small" + ) + embedding = response.data[0].embedding + honcho_document.embedding = embedding + honcho_document.created_at = datetime.datetime.now() + + if document.metadata is not None: + honcho_document.h_metadata = document.metadata + db.commit() + db.refresh(honcho_document) + return honcho_document + +def delete_document(db: Session, app_id: str, user_id: str, collection_id: uuid.UUID, document_id: uuid.UUID) -> bool: + stmt = ( + select(models.Document) + .join(models.Collection, models.Collection.id == models.Document.collection_id) + .where(models.Collection.app_id == app_id) + .where(models.Collection.user_id == user_id) + .where(models.Document.collection_id == collection_id) + .where(models.Document.id == document_id) + ) + document = db.scalars(stmt).one_or_none() + if document is None: + return False + db.delete(document) + db.commit() + return True + diff --git a/api/src/main.py b/api/src/main.py index c3ef0d8..7a16e9c 100644 --- a/api/src/main.py +++ b/api/src/main.py @@ -1,6 +1,6 @@ import uuid from fastapi import Depends, FastAPI, HTTPException, APIRouter, Request -from typing import Optional +from typing import Optional, Sequence from sqlalchemy.orm import Session from slowapi import Limiter, _rate_limit_exceeded_handler from slowapi.middleware import SlowAPIMiddleware @@ -9,7 +9,6 @@ from fastapi_pagination import Page, add_pagination from fastapi_pagination.ext.sqlalchemy import paginate -# import uvicorn from . 
import crud, models, schemas from .db import SessionLocal, engine @@ -44,7 +43,13 @@ def get_db(): ######################################################## @router.get("/sessions", response_model=Page[schemas.Session]) -def get_sessions(request: Request, app_id: str, user_id: str, location_id: Optional[str] = None, db: Session = Depends(get_db)): +def get_sessions( + request: Request, + app_id: str, + user_id: str, + location_id: Optional[str] = None, + db: Session = Depends(get_db) +): """Get All Sessions for a User Args: @@ -56,11 +61,7 @@ def get_sessions(request: Request, app_id: str, user_id: str, location_id: Optio list[schemas.Session]: List of Session objects """ - # if location_id is not None: - # return paginate(db, crud.get_sessions(db, app_id=app_id, user_id=user_id, location_id=location_id)) - # return crud.get_sessions(db, app_id=app_id, user_id=user_id, location_id=location_id) return paginate(db, crud.get_sessions(db, app_id=app_id, user_id=user_id, location_id=location_id)) - # return crud.get_sessions(db, app_id=app_id, user_id=user_id) @router.post("/sessions", response_model=schemas.Session) @@ -102,10 +103,10 @@ def update_session( schemas.Session: The Session object of the updated Session """ - if session.session_data is None: - raise HTTPException(status_code=400, detail="Session data cannot be empty") # TODO TEST if I can set the metadata to be blank with this + if session.metadata is None: + raise HTTPException(status_code=400, detail="Session metadata cannot be empty") # TODO TEST if I can set the metadata to be blank with this try: - return crud.update_session(db, app_id=app_id, user_id=user_id, session_id=session_id, session_data=session.session_data) + return crud.update_session(db, app_id=app_id, user_id=user_id, session_id=session_id, session=session) except ValueError: raise HTTPException(status_code=404, detail="Session not found") @@ -243,10 +244,8 @@ def get_message( raise HTTPException(status_code=404, detail="Session not found") 
return honcho_message - - ######################################################## -# Metacognition Routes +# metamessage routes ######################################################## @router.post( @@ -276,9 +275,6 @@ def create_metamessage( HTTPException: If the session is not found """ - print("=======================") - print(request) - print("=======================") try: return crud.create_metamessage(db, metamessage=metamessage, app_id=app_id, user_id=user_id, session_id=session_id) except ValueError: @@ -336,5 +332,176 @@ def get_metamessage(request: Request, app_id: str, user_id: str, session_id: uui raise HTTPException(status_code=404, detail="Session not found") return honcho_metamessage +######################################################## +# collection routes +######################################################## + +@router.get("/collections/all", response_model=Page[schemas.Collection]) +def get_collections( + request: Request, + app_id: str, + user_id: str, + db: Session = Depends(get_db), +): + return paginate(db, crud.get_collections(db, app_id=app_id, user_id=user_id)) + +@router.get("/collections/id/{collection_id}", response_model=schemas.Collection) +def get_collection_by_id( + request: Request, + app_id: str, + user_id: str, + collection_id: uuid.UUID, + db: Session = Depends(get_db) +) -> schemas.Collection: + honcho_collection = crud.get_collection_by_id(db, app_id=app_id, user_id=user_id, collection_id=collection_id) + if honcho_collection is None: + raise HTTPException(status_code=404, detail="collection not found or does not belong to user") + return honcho_collection + +@router.get("/collections/name/{name}", response_model=schemas.Collection) +def get_collection_by_name( + request: Request, + app_id: str, + user_id: str, + name: str, + db: Session = Depends(get_db) +) -> schemas.Collection: + honcho_collection = crud.get_collection_by_name(db, app_id=app_id, user_id=user_id, name=name) + if honcho_collection is None: + raise 
HTTPException(status_code=404, detail="collection not found or does not belong to user") + return honcho_collection + +@router.post("/collections", response_model=schemas.Collection) +def create_collection( + request: Request, + app_id: str, + user_id: str, + collection: schemas.CollectionCreate, + db: Session = Depends(get_db) +): + try: + return crud.create_collection(db, collection=collection, app_id=app_id, user_id=user_id) + except ValueError: + raise HTTPException(status_code=406, detail="Error invalid collection configuration - name may already exist") + +@router.put("/collections/{collection_id}", response_model=schemas.Collection) +def update_collection( + request: Request, + app_id: str, + user_id: str, + collection_id: uuid.UUID, + collection: schemas.CollectionUpdate, + db: Session = Depends(get_db) +): + if collection.name is None: + raise HTTPException(status_code=400, detail="invalid request - name cannot be None") + try: + honcho_collection = crud.update_collection(db, collection=collection, app_id=app_id, user_id=user_id, collection_id=collection_id) + except ValueError: + raise HTTPException(status_code=406, detail="Error invalid collection configuration - name may already exist") + return honcho_collection + +@router.delete("/collections/{collection_id}") +def delete_collection( + request: Request, + app_id: str, + user_id: str, + collection_id: uuid.UUID, + db: Session = Depends(get_db) +): + response = crud.delete_collection(db, app_id=app_id, user_id=user_id, collection_id=collection_id) + if response: + return {"message": "Collection deleted successfully"} + else: + raise HTTPException(status_code=404, detail="collection not found or does not belong to user") + +######################################################## +# Document routes +######################################################## + +@router.get("/collections/{collection_id}/documents", response_model=Page[schemas.Document]) +def get_documents( + request: Request, + app_id: str, 
+ user_id: str, + collection_id: uuid.UUID, + db: Session = Depends(get_db) +): + try: + return paginate(db, crud.get_documents(db, app_id=app_id, user_id=user_id, collection_id=collection_id)) + except ValueError: # TODO can probably remove this exception ok to return empty here + raise HTTPException(status_code=404, detail="collection not found or does not belong to user") + +router.get("/collections/{collection_id}/documents/{document_id}", response_model=schemas.Document) +def get_document( + request: Request, + app_id: str, + user_id: str, + collection_id: uuid.UUID, + document_id: uuid.UUID, + db: Session = Depends(get_db) +): + honcho_document = crud.get_document(db, app_id=app_id, user_id=user_id, collection_id=collection_id, document_id=document_id) + if honcho_document is None: + raise HTTPException(status_code=404, detail="document not found or does not belong to user") + return honcho_document + + +@router.get("/collections/{collection_id}/query", response_model=Sequence[schemas.Document]) +def query_documents( + request: Request, + app_id: str, + user_id: str, + collection_id: uuid.UUID, + query: str, + top_k: int = 5, + db: Session = Depends(get_db) +): + if top_k is not None and top_k > 50: + top_k = 50 # TODO see if we need to paginate this + return crud.query_documents(db=db, app_id=app_id, user_id=user_id, collection_id=collection_id, query=query, top_k=top_k) + +@router.post("/collections/{collection_id}/documents", response_model=schemas.Document) +def create_document( + request: Request, + app_id: str, + user_id: str, + collection_id: uuid.UUID, + document: schemas.DocumentCreate, + db: Session = Depends(get_db) +): + try: + return crud.create_document(db, document=document, app_id=app_id, user_id=user_id, collection_id=collection_id) + except ValueError: + raise HTTPException(status_code=404, detail="collection not found or does not belong to user") + +@router.put("/collections/{collection_id}/documents/{document_id}", 
response_model=schemas.Document) +def update_document( + request: Request, + app_id: str, + user_id: str, + collection_id: uuid.UUID, + document_id: uuid.UUID, + document: schemas.DocumentUpdate, + db: Session = Depends(get_db) +): + if document.content is None and document.metadata is None: + raise HTTPException(status_code=400, detail="content and metadata cannot both be None") + return crud.update_document(db, document=document, app_id=app_id, user_id=user_id, collection_id=collection_id, document_id=document_id) + +@router.delete("/collections/{collection_id}/documents/{document_id}") +def delete_document( + request: Request, + app_id: str, + user_id: str, + collection_id: uuid.UUID, + document_id: uuid.UUID, + db: Session = Depends(get_db) +): + response = crud.delete_document(db, app_id=app_id, user_id=user_id, collection_id=collection_id, document_id=document_id) + if response: + return {"message": "Document deleted successfully"} + else: + raise HTTPException(status_code=404, detail="document not found or does not belong to user") app.include_router(router) diff --git a/api/src/models.py b/api/src/models.py index 4371229..ea4b86b 100644 --- a/api/src/models.py +++ b/api/src/models.py @@ -1,10 +1,20 @@ -from sqlalchemy import Boolean, Column, ForeignKey, Integer, String, DateTime, Uuid -import uuid import datetime -from sqlalchemy.orm import relationship, Mapped, mapped_column +import os +import uuid + +from dotenv import load_dotenv +from pgvector.sqlalchemy import Vector +from sqlalchemy import JSON, Column, ForeignKey, String, UniqueConstraint, Uuid +from sqlalchemy.dialects.postgresql import JSONB +from sqlalchemy.orm import Mapped, mapped_column, relationship from .db import Base +load_dotenv() + +DATABASE_TYPE = os.getenv("DATABASE_TYPE", 'postgres') + +ColumnType = JSONB if DATABASE_TYPE == 'postgres' else JSON class Session(Base): __tablename__ = "sessions" @@ -13,13 +23,12 @@ class Session(Base): user_id: Mapped[str] = mapped_column(String(512), 
index=True) location_id: Mapped[str] = mapped_column(String(512), index=True) is_active: Mapped[bool] = mapped_column(default=True) - session_data: Mapped[str] + h_metadata: Mapped[dict] = mapped_column("metadata", ColumnType, default={}) created_at: Mapped[datetime.datetime] = mapped_column(default=datetime.datetime.utcnow) messages = relationship("Message", back_populates="session") def __repr__(self) -> str: - return f"Session(id={self.id}, app_id={self.app_id}, user_id={self.user_id}, location_id={self.location_id}, is_active={self.is_active}, created_at={self.created_at})" - + return f"Session(id={self.id}, app_id={self.app_id}, user_id={self.user_id}, location_id={self.location_id}, is_active={self.is_active}, created_at={self.created_at}, h_metadata={self.h_metadata})" class Message(Base): __tablename__ = "messages" @@ -34,7 +43,6 @@ class Message(Base): def __repr__(self) -> str: return f"Message(id={self.id}, session_id={self.session_id}, is_user={self.is_user}, content={self.content[10:]})" - class Metamessage(Base): __tablename__ = "metamessages" id: Mapped[uuid.UUID] = mapped_column(primary_key=True, index=True, default=uuid.uuid4) @@ -47,3 +55,27 @@ class Metamessage(Base): def __repr__(self) -> str: return f"Metamessages(id={self.id}, message_id={self.message_id}, metamessage_type={self.metamessage_type}, content={self.content[10:]})" + +class Collection(Base): + __tablename__ = "collections" + id: Mapped[uuid.UUID] = mapped_column(primary_key=True, index=True, default=uuid.uuid4) + name: Mapped[str] = mapped_column(String(512), index=True) + app_id: Mapped[str] = mapped_column(String(512), index=True) + user_id: Mapped[str] = mapped_column(String(512), index=True) + created_at: Mapped[datetime.datetime] = mapped_column(default=datetime.datetime.utcnow) + documents = relationship("Document", back_populates="collection", cascade="all, delete, delete-orphan") + + __table_args__ = ( + UniqueConstraint('name', 'app_id', 'user_id', 
name="unique_name_app_user"), + ) + +class Document(Base): + __tablename__ = "documents" + id: Mapped[uuid.UUID] = mapped_column(primary_key=True, index=True, default=uuid.uuid4) + h_metadata: Mapped[dict] = mapped_column("metadata", ColumnType, default={}) + content: Mapped[str] = mapped_column(String(65535)) + embedding = mapped_column(Vector(1536)) + created_at: Mapped[datetime.datetime] = mapped_column(default=datetime.datetime.utcnow) + + collection_id = Column(Uuid, ForeignKey("collections.id")) + collection = relationship("Collection", back_populates="documents") diff --git a/api/src/schemas.py b/api/src/schemas.py index b6bff90..fe164aa 100644 --- a/api/src/schemas.py +++ b/api/src/schemas.py @@ -1,4 +1,4 @@ -from pydantic import BaseModel +from pydantic import BaseModel, validator import datetime import uuid @@ -18,8 +18,7 @@ class Message(MessageBase): created_at: datetime.datetime class Config: - orm_mode = True - + from_attributes = True class SessionBase(BaseModel): pass @@ -27,12 +26,10 @@ class SessionBase(BaseModel): class SessionCreate(SessionBase): location_id: str - session_data: dict | None = None - - + metadata: dict | None = {} + class SessionUpdate(SessionBase): - session_data: dict | None = None - + metadata: dict | None = None class Session(SessionBase): id: uuid.UUID @@ -41,11 +38,21 @@ class Session(SessionBase): user_id: str location_id: str app_id: str - session_data: str + h_metadata: dict + metadata: dict created_at: datetime.datetime + @validator('metadata', pre=True, allow_reuse=True) + def fetch_h_metadata(cls, value, values): + if 'h_metadata' in values: + return values['h_metadata'] + return {} + class Config: - orm_mode = True + from_attributes = True + schema_extra = { + "exclude": ["h_metadata"] + } class MetamessageBase(BaseModel): @@ -64,3 +71,53 @@ class Metamessage(MetamessageBase): class Config: orm_mode = True + +class CollectionBase(BaseModel): + pass + +class CollectionCreate(CollectionBase): + name: str + +class 
CollectionUpdate(CollectionBase): + name: str + +class Collection(CollectionBase): + id: uuid.UUID + name: str + app_id: str + user_id: str + created_at: datetime.datetime + + class Config: + orm_mode = True + +class DocumentBase(BaseModel): + content: str + +class DocumentCreate(DocumentBase): + metadata: dict | None = {} + +class DocumentUpdate(DocumentBase): + metadata: dict | None = None + content: str | None = None + +class Document(DocumentBase): + id: uuid.UUID + content: str + h_metadata: dict + metadata: dict + created_at: datetime.datetime + collection_id: uuid.UUID + + @validator('metadata', pre=True, allow_reuse=True) + def fetch_h_metadata(cls, value, values): + if 'h_metadata' in values: + return values['h_metadata'] + return {} + + class Config: + from_attributes = True + schema_extra = { + "exclude": ["h_metadata"] + } + diff --git a/example/discord/simple-roast-bot/README.md b/example/discord/simple-roast-bot/README.md index a2acdf8..4c6500b 100644 --- a/example/discord/simple-roast-bot/README.md +++ b/example/discord/simple-roast-bot/README.md @@ -2,6 +2,8 @@ The goal of this repo is to demonstrate how to deploy an LLM application using Honcho to manage user data. Here we've implemented a simple Discord bot that interacts with OpenAI's GPT-3.5-Turbo model via LangChain. Oh, and also, it's prompted to roast you. 
+***This demo is live -- join our Discord server and the bot will DM you to start the conversation*** + To run locally, follow these steps: ### Clone the Repository diff --git a/scripts/syncronizer.py b/scripts/syncronizer.py index 758f3eb..0630b8f 100644 --- a/scripts/syncronizer.py +++ b/scripts/syncronizer.py @@ -16,3 +16,23 @@ destination_file_path = os.path.join(this_dir, "../sdk/honcho/sync_client.py") with open(destination_file_path, "w") as destination_file: destination_file.write(sync_code) + + +# tests + +# Open the source file +source_file_path = os.path.join(this_dir, "../sdk/tests/test_async.py") +with open(source_file_path, "r") as source_file: + source_code = source_file.read() + +# Use regex to remove async mentions +sync_code = re.sub(r"@pytest.mark.asyncio\n", "", source_code) +sync_code = re.sub(r"async\s", "", sync_code) +sync_code = re.sub(r"await\s", "", sync_code) +sync_code = re.sub(r"__anext__", "__next__", sync_code) +sync_code = re.sub(r"Async", "", sync_code) + +# Write the modified code to the destination file +destination_file_path = os.path.join(this_dir, "../sdk/tests/test_sync.py") +with open(destination_file_path, "w") as destination_file: + destination_file.write(sync_code) diff --git a/sdk/CHANGELOG.md b/sdk/CHANGELOG.md index 1d8ecd5..54965e4 100644 --- a/sdk/CHANGELOG.md +++ b/sdk/CHANGELOG.md @@ -6,6 +6,21 @@ The format is based on [Keep a Changelog](http://keepachangelog.com/) and this project adheres to [Semantic Versioning](http://semver.org/). 
+## [0.0.3] — 2024-02-15 + +### Added + +* Collections table to reference a collection of embedding documents +* Documents table to hold vector embeddings for RAG workflows +* Local scripts for running a postgres database with pgvector installed +* OpenAI Dependency for embedding models +* PGvector dependency for vector db support + +### Changed + +* session_data is now metadata +* session_data is a JSON field used python `dict` for compatability + ## [0.0.2] — 2024-02-08 ### Added diff --git a/sdk/honcho/__init__.py b/sdk/honcho/__init__.py index e87b439..eda9003 100644 --- a/sdk/honcho/__init__.py +++ b/sdk/honcho/__init__.py @@ -1,4 +1,4 @@ -from .client import AsyncClient, AsyncSession, AsyncGetSessionPage, AsyncGetMessagePage, AsyncGetMetamessagePage -from .sync_client import Client, Session, GetSessionPage, GetMessagePage, GetMetamessagePage -from .schemas import Message, Metamessage +from .client import AsyncClient, AsyncSession, AsyncCollection, AsyncGetSessionPage, AsyncGetMessagePage, AsyncGetMetamessagePage, AsyncGetDocumentPage, AsyncGetCollectionPage +from .sync_client import Client, Session, Collection, GetSessionPage, GetMessagePage, GetMetamessagePage, GetDocumentPage, GetCollectionPage +from .schemas import Message, Metamessage, Document from .cache import LRUCache diff --git a/sdk/honcho/client.py b/sdk/honcho/client.py index c5a1373..a21532b 100644 --- a/sdk/honcho/client.py +++ b/sdk/honcho/client.py @@ -1,8 +1,8 @@ -import json import uuid -from typing import Dict, Optional +import datetime +from typing import Dict, Optional, List import httpx -from .schemas import Message, Metamessage +from .schemas import Message, Metamessage, Document class AsyncGetPage: """Base class for receiving Paginated API results""" @@ -44,7 +44,8 @@ def __init__(self, client, options: Dict, response: Dict): user_id=session["user_id"], location_id=session["location_id"], is_active=session["is_active"], - session_data=session["session_data"], + 
metadata=session["metadata"], + created_at=session["created_at"], ) for session in response["items"] ] @@ -97,13 +98,13 @@ def __init__(self, session, options: Dict, response: Dict) -> None: Args: session (AsyncSession): Session the returned messages are associated with - options (Dict): Options for the request used mainly for next() to filter queries. The two parameters available are message_id and metamessage_type which are both required + options (Dict): Options for the request used mainly for next() to filter queries. The two parameters available are message_id and metamessage_type which are both optional response (Dict): Response from API with pagination information """ super().__init__(response) self.session = session - self.message_id = options["message_id"] - self.metamessage_type = options["metamessage_type"] + self.message_id = options["message_id"] if "message_id" in options else None + self.metamessage_type = options["metamessage_type"] if "metamessage_type" in options else None self.items = [ Metamessage( id=metamessage["id"], @@ -124,7 +125,70 @@ async def next(self): return None return await self.session.get_metamessages(metamessage_type=self.metamessage_type, message=self.message_id, page=(self.page + 1), page_size=self.page_size) +class AsyncGetDocumentPage(AsyncGetPage): + """Paginated results for Get Document requests""" + def __init__(self, collection, response: Dict) -> None: + """Constructor for Page Result from Document Get Request + + Args: + collection (AsyncCollection): Collection the returned documents are associated with + response (Dict): Response from API with pagination information + """ + super().__init__(response) + self.collection = collection + self.items = [ + Document( + id=document["id"], + collection_id=collection.id, + content=document["content"], + metadata=document["metadata"], + created_at=document["created_at"], + ) + for document in response["items"] + ] + async def next(self): + """Get the next page of results + 
Returns: + AsyncGetDocumentPage | None: Next Page of Results or None if there are no more sessions to retreive from a query + """ + if self.page >= self.pages: + return None + return await self.collection.get_documents(page=self.page + 1, page_size=self.page_size) + +class AsyncGetCollectionPage(AsyncGetPage): + """Paginated results for Get Collection requests""" + + def __init__(self, client, options: Dict, response: Dict): + """Constructor for page result from Get Collection Request + + Args: + client (Async Client): Honcho Client + options (Dict): Options for the request used mainly for next() to filter queries. The only parameter available is user_id which is required + response (Dict): Response from API with pagination information + """ + super().__init__(response) + self.client = client + self.user_id = options["user_id"] + self.items = [ + AsyncCollection( + client=client, + id=collection["id"], + user_id=collection["user_id"], + name=collection["name"], + created_at=collection["created_at"], + ) + for collection in response["items"] + ] + + async def next(self): + """Get the next page of results + Returns: + AsyncGetCollectionPage | None: Next Page of Results or None if there are no more sessions to retreive from a query + """ + if self.page >= self.pages: + return None + return await self.client.get_collections(user_id=self.user_id, page=self.page + 1, page_size=self.page_size) class AsyncClient: """Honcho API Client Object""" @@ -161,7 +225,8 @@ async def get_session(self, user_id: str, session_id: uuid.UUID): user_id=data["user_id"], location_id=data["location_id"], is_active=data["is_active"], - session_data=data["session_data"], + metadata=data["metadata"], + created_at=data["created_at"] ) async def get_sessions(self, user_id: str, location_id: Optional[str] = None, page: int = 1, page_size: int = 50): @@ -215,20 +280,20 @@ async def get_sessions_generator(self, user_id: str, location_id: Optional[str] get_session_response = new_sessions async def 
create_session( - self, user_id: str, location_id: str = "default", session_data: Dict = {} + self, user_id: str, location_id: str = "default", metadata: Dict = {} ): """Create a session for a user Args: user_id (str): The User ID representing the user, managed by the user location_id (str, optional): Optional Location ID representing the location of a session - session_data (Dict, optional): Optional session metadata + metadata (Dict, optional): Optional session metadata Returns: AsyncSession: The Session object of the new Session """ - data = {"location_id": location_id, "session_data": session_data} + data = {"location_id": location_id, "metadata": metadata} url = f"{self.common_prefix}/users/{user_id}/sessions" response = await self.client.post(url, json=data) response.raise_for_status() @@ -238,10 +303,103 @@ async def create_session( id=data["id"], user_id=user_id, location_id=location_id, - session_data=session_data, + metadata=metadata, is_active=data["is_active"], + created_at=data["created_at"], + ) + + async def create_collection( + self, user_id: str, name: str, + ): + """Create a collection for a user + + Args: + user_id (str): The User ID representing the user, managed by the user + name (str): unique name for the collection for the user + + Returns: + AsyncCollection: The Collection object of the new Collection + + """ + data = {"name": name} + url = f"{self.common_prefix}/users/{user_id}/collections" + response = await self.client.post(url, json=data) + response.raise_for_status() + data = response.json() + return AsyncCollection( + self, + id=data["id"], + user_id=user_id, + name=name, + created_at=data["created_at"], ) + async def get_collection(self, user_id: str, name: str): + """Get a specific collection for a user by name + + Args: + user_id (str): The User ID representing the user, managed by the user + name (str): The name of the collection to get + + Returns: + AsyncCollection: The Session object of the requested Session + + """ + url = 
f"{self.common_prefix}/users/{user_id}/collections/name/{name}" + response = await self.client.get(url) + response.raise_for_status() + data = response.json() + return AsyncCollection( + client=self, + id=data["id"], + user_id=data["user_id"], + name=data["name"], + created_at=data["created_at"] + ) + + async def get_collections(self, user_id: str, page: int = 1, page_size: int = 50): + """Return collections associated with a user paginated + + Args: + user_id (str): The User ID representing the user to get the collection for + page (int, optional): The page of results to return + page_size (int, optional): The number of results to return + + Returns: + AsyncGetCollectionPage: Page or results for get_collections query + + """ + url = f"{self.common_prefix}/users/{user_id}/collections/all?page={page}&size={page_size}" + response = await self.client.get(url) + response.raise_for_status() + data = response.json() + options = {"user_id": user_id} + return AsyncGetCollectionPage(self, options, data) + + async def get_collections_generator(self, user_id: str): + """Shortcut Generator for get_sessions. 
Generator to iterate through all sessions for a user in an app + + Args: + user_id (str): The User ID representing the user, managed by the user + + Yields: + AsyncCollection: The Session object of the requested Session + + """ + page = 1 + page_size = 50 + get_collection_response = await self.get_collections(user_id, page, page_size) + while True: + # get_collection_response = self.get_collections(user_id, location_id, page, page_size) + for collection in get_collection_response.items: + yield collection + + new_collections = await get_collection_response.next() + if not new_collections: + break + + get_collection_response = new_collections + class AsyncSession: """Represents a single session for a user in an app""" @@ -252,20 +410,20 @@ def __init__( id: uuid.UUID, user_id: str, location_id: str, - session_data: dict | str, + metadata: dict, is_active: bool, + created_at: datetime.datetime ): """Constructor for Session""" - self.base_url = client.base_url - self.client = client.client - self.app_id = client.app_id - self.id = id - self.user_id = user_id - self.location_id = location_id - self.session_data = ( - session_data if isinstance(session_data, dict) else json.loads(session_data) - ) - self._is_active = is_active + self.base_url: str = client.base_url + self.client: httpx.AsyncClient = client.client + self.app_id: str = client.app_id + self.id: uuid.UUID = id + self.user_id: str = user_id + self.location_id: str = location_id + self.metadata: dict = metadata + self._is_active: bool = is_active + self.created_at: datetime.datetime = created_at @property def common_prefix(self): @@ -274,7 +432,8 @@ def common_prefix(self): def __str__(self): """String representation of Session""" - return f"AsyncSession(id={self.id}, app_id={self.app_id}, user_id={self.user_id}, location_id={self.location_id}, session_data={self.session_data}, is_active={self.is_active})" + return f"AsyncSession(id={self.id}, app_id={self.app_id}, user_id={self.user_id}, 
location_id={self.location_id}, metadata={self.metadata}, is_active={self.is_active})" + @property def is_active(self): @@ -360,7 +519,7 @@ async def create_metamessage(self, message: Message, metamessage_type: str, cont Args: message (Message): A message to associate the metamessage with - metamessage_type (str): The type of the metamessage arbitrary itentifier + metamessage_type (str): The type of the metamessage arbitrary identifier content (str): The content of the metamessage Returns: @@ -444,21 +603,20 @@ async def get_metamessages_generator(self, metamessage_type: Optional[str] = Non get_metamessages_page = new_messages - async def update(self, session_data: Dict): - """Update the session_data of a session + async def update(self, metadata: Dict): + """Update the metadata of a session Args: - session_data (Dict): The Session object containing any new session_data - + metadata (Dict): The Session object containing any new metadata Returns: boolean: Whether the session was successfully updated """ - info = {"session_data": session_data} + info = {"metadata": metadata} url = f"{self.common_prefix}/users/{self.user_id}/sessions/{self.id}" response = await self.client.put(url, json=info) success = response.status_code < 400 - self.session_data = session_data + self.metadata = metadata return success async def close(self): @@ -468,3 +626,202 @@ async def close(self): response.raise_for_status() self._is_active = False +class AsyncCollection: + """Represents a single collection for a user in an app""" + + def __init__( + self, + client: AsyncClient, + id: uuid.UUID, + user_id: str, + name: str, + created_at: datetime.datetime, + ): + """Constructor for Collection""" + self.base_url: str = client.base_url + self.client: httpx.AsyncClient = client.client + self.app_id: str = client.app_id + self.id: uuid.UUID = id + self.user_id: str = user_id + self.name: str = name + self.created_at: datetime.datetime = created_at + + @property + def common_prefix(self): + 
"""Shortcut for common API prefix. made a property to prevent tampering""" + return f"{self.base_url}/apps/{self.app_id}" + + def __str__(self): + """String representation of Collection""" + return f"AsyncCollection(id={self.id}, app_id={self.app_id}, user_id={self.user_id}, name={self.name}, created_at={self.created_at})" + + async def update(self, name: str): + """Update the name of the collection + + Args: + name (str): The new name of the document + + Returns: + boolean: Whether the session was successfully updated + """ + info = {"name": name} + url = f"{self.common_prefix}/users/{self.user_id}/collections/{self.id}" + response = await self.client.put(url, json=info) + response.raise_for_status() + success = response.status_code < 400 + self.name = name + return success + + async def delete(self): + """Delete a collection and all associated documents""" + url = f"{self.common_prefix}/users/{self.user_id}/collections/{self.id}" + response = await self.client.delete(url) + response.raise_for_status() + + async def create_document(self, content: str, metadata: Dict = {}): + """Adds a document to the collection + + Args: + content (str): The content of the document + metadata (Dict): The metadata of the document + + Returns: + Document: The Document object of the added document + + """ + data = {"metadata": metadata, "content": content} + url = f"{self.common_prefix}/users/{self.user_id}/collections/{self.id}/documents" + response = await self.client.post(url, json=data) + response.raise_for_status() + data = response.json() + return Document( + collection_id=self.id, + id=data["id"], + metadata=metadata, + content=content, + created_at=data["created_at"] + ) + + async def get_document(self, document_id: uuid.UUID) -> Document: + """Get a specific document for a collection based on ID + + Args: + document_id (uuid.UUID): The ID of the Document to retrieve + + Returns: + Document: The Document object + + """ + url = 
f"{self.common_prefix}/users/{self.user_id}/collections/{self.id}/documents/{document_id}" + response = await self.client.get(url) + response.raise_for_status() + data = response.json() + return Document( + collection_id=self.id, + id=data["id"], + metadata=data["metadata"], + content=data["content"], + created_at=data["created_at"] + ) + + async def get_documents(self, page: int = 1, page_size: int = 50) -> AsyncGetDocumentPage: + """Get all documents for a collection + + Args: + page (int, optional): The page of results to return + page_size (int, optional): The number of results to return per page + + Returns: + AsyncGetDocumentPage: Page of Document objects + + """ + url = f"{self.common_prefix}/users/{self.user_id}/collections/{self.id}/documents?page={page}&size={page_size}" + response = await self.client.get(url) + response.raise_for_status() + data = response.json() + return AsyncGetDocumentPage(self, data) + + async def get_documents_generator(self): + """Shortcut Generator for get_documents. Generator to iterate through all documents for a collection in an app + + Yields: + Document: The Document object of the next Document + + """ + page = 1 + page_size = 50 + get_documents_page= await self.get_documents(page, page_size) + while True: + for document in get_documents_page.items: + yield document + + new_documents = await get_documents_page.next() + if not new_documents: + break + + get_documents_page = new_documents + + async def query(self, query: str, top_k: int = 5) -> List[Document]: + """query the documents by cosine distance + Args: + query (str): The query string to compare other embeddings too + top_k (int, optional): The number of results to return. 
Defaults to 5 max 50 + + Returns: + List[Document]: The response from the query with matching documents + """ + url = f"{self.common_prefix}/users/{self.user_id}/collections/{self.id}/query?query={query}&top_k={top_k}" + response = await self.client.get(url) + response.raise_for_status() + data = [ + Document( + collection_id=self.id, + content=document["content"], + id=document["id"], + created_at=document["created_at"], + metadata=document["metadata"] + ) + for document in response.json() + ] + return data + + async def update_document(self, document: Document, content: Optional[str], metadata: Optional[Dict]) -> Document: + """Update a document in the collection + + Args: + document (Document): The Document to update + metadata (Dict): The metadata of the document + content (str): The content of the document + + Returns: + Document: The newly updated Document + """ + if metadata is None and content is None: + raise ValueError("metadata and content cannot both be None") + data = {"metadata": metadata, "content": content} + url = f"{self.common_prefix}/users/{self.user_id}/collections/{self.id}/documents/{document.id}" + response = await self.client.put(url, json=data) + response.raise_for_status() + data = response.json() + return Document( + data["id"], + metadata=data["metadata"], + content=data["content"], + created_at=data["created_at"], + collection_id=data["collection_id"], + ) + + async def delete_document(self, document: Document) -> bool: + """Delete a document from the collection + + Args: + document (Document): The Document to delete + + Returns: + boolean: Whether the document was successfully deleted + """ + url = f"{self.common_prefix}/users/{self.user_id}/collections/{self.id}/documents/{document.id}" + response = await self.client.delete(url) + response.raise_for_status() + success = response.status_code < 400 + return success diff --git a/sdk/honcho/schemas.py b/sdk/honcho/schemas.py index d2970c9..b5f74d2 100644 --- a/sdk/honcho/schemas.py +++ 
b/sdk/honcho/schemas.py @@ -24,3 +24,16 @@ def __init__(self, id: uuid.UUID, message_id: uuid.UUID, metamessage_type: str, def __str__(self): return f"Metamessage(id={self.id}, message_id={self.message_id}, metamessage_type={self.metamessage_type}, content={self.content})" + +class Document: + def __init__(self, id: uuid.UUID, collection_id: uuid.UUID, content: str, metadata: dict, created_at: datetime.datetime): + """Constructor for Document""" + self.collection_id = collection_id + self.id = id + self.content = content + self.metadata = metadata + self.created_at = created_at + + def __str__(self) -> str: + return f"Document(id={self.id}, metadata={self.metadata}, content={self.content}, created_at={self.created_at})" + diff --git a/sdk/honcho/sync_client.py b/sdk/honcho/sync_client.py index 2adc4e4..72c5261 100644 --- a/sdk/honcho/sync_client.py +++ b/sdk/honcho/sync_client.py @@ -1,8 +1,8 @@ -import json import uuid -from typing import Dict, Optional +import datetime +from typing import Dict, Optional, List import httpx -from .schemas import Message, Metamessage +from .schemas import Message, Metamessage, Document class GetPage: """Base class for receiving Paginated API results""" @@ -44,7 +44,8 @@ def __init__(self, client, options: Dict, response: Dict): user_id=session["user_id"], location_id=session["location_id"], is_active=session["is_active"], - session_data=session["session_data"], + metadata=session["metadata"], + created_at=session["created_at"], ) for session in response["items"] ] @@ -97,13 +98,13 @@ def __init__(self, session, options: Dict, response: Dict) -> None: Args: session (Session): Session the returned messages are associated with - options (Dict): Options for the request used mainly for next() to filter queries. The two parameters available are message_id and metamessage_type which are both required + options (Dict): Options for the request used mainly for next() to filter queries. 
The two parameters available are message_id and metamessage_type which are both optional response (Dict): Response from API with pagination information """ super().__init__(response) self.session = session - self.message_id = options["message_id"] - self.metamessage_type = options["metamessage_type"] + self.message_id = options["message_id"] if "message_id" in options else None + self.metamessage_type = options["metamessage_type"] if "metamessage_type" in options else None self.items = [ Metamessage( id=metamessage["id"], @@ -124,7 +125,70 @@ def next(self): return None return self.session.get_metamessages(metamessage_type=self.metamessage_type, message=self.message_id, page=(self.page + 1), page_size=self.page_size) +class GetDocumentPage(GetPage): + """Paginated results for Get Document requests""" + def __init__(self, collection, response: Dict) -> None: + """Constructor for Page Result from Document Get Request + + Args: + collection (Collection): Collection the returned documents are associated with + response (Dict): Response from API with pagination information + """ + super().__init__(response) + self.collection = collection + self.items = [ + Document( + id=document["id"], + collection_id=collection.id, + content=document["content"], + metadata=document["metadata"], + created_at=document["created_at"], + ) + for document in response["items"] + ] + def next(self): + """Get the next page of results + Returns: + GetDocumentPage | None: Next Page of Results or None if there are no more sessions to retreive from a query + """ + if self.page >= self.pages: + return None + return self.collection.get_documents(page=self.page + 1, page_size=self.page_size) + +class GetCollectionPage(GetPage): + """Paginated results for Get Collection requests""" + + def __init__(self, client, options: Dict, response: Dict): + """Constructor for page result from Get Collection Request + + Args: + client ( Client): Honcho Client + options (Dict): Options for the request used mainly 
for next() to filter queries. The only parameter available is user_id which is required + response (Dict): Response from API with pagination information + """ + super().__init__(response) + self.client = client + self.user_id = options["user_id"] + self.items = [ + Collection( + client=client, + id=collection["id"], + user_id=collection["user_id"], + name=collection["name"], + created_at=collection["created_at"], + ) + for collection in response["items"] + ] + + def next(self): + """Get the next page of results + Returns: + GetCollectionPage | None: Next Page of Results or None if there are no more sessions to retreive from a query + """ + if self.page >= self.pages: + return None + return self.client.get_collections(user_id=self.user_id, page=self.page + 1, page_size=self.page_size) class Client: """Honcho API Client Object""" @@ -161,7 +225,8 @@ def get_session(self, user_id: str, session_id: uuid.UUID): user_id=data["user_id"], location_id=data["location_id"], is_active=data["is_active"], - session_data=data["session_data"], + metadata=data["metadata"], + created_at=data["created_at"] ) def get_sessions(self, user_id: str, location_id: Optional[str] = None, page: int = 1, page_size: int = 50): @@ -215,20 +280,20 @@ def get_sessions_generator(self, user_id: str, location_id: Optional[str] = None get_session_response = new_sessions def create_session( - self, user_id: str, location_id: str = "default", session_data: Dict = {} + self, user_id: str, location_id: str = "default", metadata: Dict = {} ): """Create a session for a user Args: user_id (str): The User ID representing the user, managed by the user location_id (str, optional): Optional Location ID representing the location of a session - session_data (Dict, optional): Optional session metadata + metadata (Dict, optional): Optional session metadata Returns: Session: The Session object of the new Session """ - data = {"location_id": location_id, "session_data": session_data} + data = {"location_id": 
location_id, "metadata": metadata} url = f"{self.common_prefix}/users/{user_id}/sessions" response = self.client.post(url, json=data) response.raise_for_status() @@ -238,10 +303,103 @@ def create_session( id=data["id"], user_id=user_id, location_id=location_id, - session_data=session_data, + metadata=metadata, is_active=data["is_active"], + created_at=data["created_at"], + ) + + def create_collection( + self, user_id: str, name: str, + ): + """Create a collection for a user + + Args: + user_id (str): The User ID representing the user, managed by the user + name (str): unique name for the collection for the user + + Returns: + Collection: The Collection object of the new Collection + + """ + data = {"name": name} + url = f"{self.common_prefix}/users/{user_id}/collections" + response = self.client.post(url, json=data) + response.raise_for_status() + data = response.json() + return Collection( + self, + id=data["id"], + user_id=user_id, + name=name, + created_at=data["created_at"], ) + def get_collection(self, user_id: str, name: str): + """Get a specific collection for a user by name + + Args: + user_id (str): The User ID representing the user, managed by the user + name (str): The name of the collection to get + + Returns: + Collection: The Session object of the requested Session + + """ + url = f"{self.common_prefix}/users/{user_id}/collections/name/{name}" + response = self.client.get(url) + response.raise_for_status() + data = response.json() + return Collection( + client=self, + id=data["id"], + user_id=data["user_id"], + name=data["name"], + created_at=data["created_at"] + ) + + def get_collections(self, user_id: str, page: int = 1, page_size: int = 50): + """Return collections associated with a user paginated + + Args: + user_id (str): The User ID representing the user to get the collection for + page (int, optional): The page of results to return + page_size (int, optional): The number of results to return + + Returns: + GetCollectionPage: Page or results for 
get_collections query + + """ + url = f"{self.common_prefix}/users/{user_id}/collections/all?page={page}&size={page_size}" + response = self.client.get(url) + response.raise_for_status() + data = response.json() + options = {"user_id": user_id} + return GetCollectionPage(self, options, data) + + def get_collections_generator(self, user_id: str): + """Shortcut Generator for get_sessions. Generator to iterate through all sessions for a user in an app + + Args: + user_id (str): The User ID representing the user, managed by the user + + Yields: + Collection: The Session object of the requested Session + + """ + page = 1 + page_size = 50 + get_collection_response = self.get_collections(user_id, page, page_size) + while True: + # get_collection_response = self.get_collections(user_id, location_id, page, page_size) + for collection in get_collection_response.items: + yield collection + + new_collections = get_collection_response.next() + if not new_collections: + break + + get_collection_response = new_collections + class Session: """Represents a single session for a user in an app""" @@ -252,20 +410,20 @@ def __init__( id: uuid.UUID, user_id: str, location_id: str, - session_data: dict | str, + metadata: dict, is_active: bool, + created_at: datetime.datetime ): """Constructor for Session""" - self.base_url = client.base_url - self.client = client.client - self.app_id = client.app_id - self.id = id - self.user_id = user_id - self.location_id = location_id - self.session_data = ( - session_data if isinstance(session_data, dict) else json.loads(session_data) - ) - self._is_active = is_active + self.base_url: str = client.base_url + self.client: httpx.Client = client.client + self.app_id: str = client.app_id + self.id: uuid.UUID = id + self.user_id: str = user_id + self.location_id: str = location_id + self.metadata: dict = metadata + self._is_active: bool = is_active + self.created_at: datetime.datetime = created_at @property def common_prefix(self): @@ -274,7 +432,8 @@ def 
common_prefix(self): def __str__(self): """String representation of Session""" - return f"Session(id={self.id}, app_id={self.app_id}, user_id={self.user_id}, location_id={self.location_id}, session_data={self.session_data}, is_active={self.is_active})" + return f"Session(id={self.id}, app_id={self.app_id}, user_id={self.user_id}, location_id={self.location_id}, metadata={self.metadata}, is_active={self.is_active})" + @property def is_active(self): @@ -360,7 +519,7 @@ def create_metamessage(self, message: Message, metamessage_type: str, content: s Args: message (Message): A message to associate the metamessage with - metamessage_type (str): The type of the metamessage arbitrary itentifier + metamessage_type (str): The type of the metamessage arbitrary identifier content (str): The content of the metamessage Returns: @@ -444,21 +603,20 @@ def get_metamessages_generator(self, metamessage_type: Optional[str] = None, mes get_metamessages_page = new_messages - def update(self, session_data: Dict): - """Update the session_data of a session + def update(self, metadata: Dict): + """Update the metadata of a session Args: - session_data (Dict): The Session object containing any new session_data - + metadata (Dict): The Session object containing any new metadata Returns: boolean: Whether the session was successfully updated """ - info = {"session_data": session_data} + info = {"metadata": metadata} url = f"{self.common_prefix}/users/{self.user_id}/sessions/{self.id}" response = self.client.put(url, json=info) success = response.status_code < 400 - self.session_data = session_data + self.metadata = metadata return success def close(self): @@ -468,3 +626,202 @@ def close(self): response.raise_for_status() self._is_active = False +class Collection: + """Represents a single collection for a user in an app""" + + def __init__( + self, + client: Client, + id: uuid.UUID, + user_id: str, + name: str, + created_at: datetime.datetime, + ): + """Constructor for Collection""" + 
self.base_url: str = client.base_url + self.client: httpx.Client = client.client + self.app_id: str = client.app_id + self.id: uuid.UUID = id + self.user_id: str = user_id + self.name: str = name + self.created_at: datetime.datetime = created_at + + @property + def common_prefix(self): + """Shortcut for common API prefix. made a property to prevent tampering""" + return f"{self.base_url}/apps/{self.app_id}" + + def __str__(self): + """String representation of Collection""" + return f"Collection(id={self.id}, app_id={self.app_id}, user_id={self.user_id}, name={self.name}, created_at={self.created_at})" + + def update(self, name: str): + """Update the name of the collection + + Args: + name (str): The new name of the document + + Returns: + boolean: Whether the session was successfully updated + """ + info = {"name": name} + url = f"{self.common_prefix}/users/{self.user_id}/collections/{self.id}" + response = self.client.put(url, json=info) + response.raise_for_status() + success = response.status_code < 400 + self.name = name + return success + + def delete(self): + """Delete a collection and all associated documents""" + url = f"{self.common_prefix}/users/{self.user_id}/collections/{self.id}" + response = self.client.delete(url) + response.raise_for_status() + + def create_document(self, content: str, metadata: Dict = {}): + """Adds a document to the collection + + Args: + content (str): The content of the document + metadata (Dict): The metadata of the document + + Returns: + Document: The Document object of the added document + + """ + data = {"metadata": metadata, "content": content} + url = f"{self.common_prefix}/users/{self.user_id}/collections/{self.id}/documents" + response = self.client.post(url, json=data) + response.raise_for_status() + data = response.json() + return Document( + collection_id=self.id, + id=data["id"], + metadata=metadata, + content=content, + created_at=data["created_at"] + ) + + def get_document(self, document_id: uuid.UUID) -> 
Document: + """Get a specific document for a collection based on ID + + Args: + document_id (uuid.UUID): The ID of the Document to retrieve + + Returns: + Document: The Document object + + """ + url = f"{self.common_prefix}/users/{self.user_id}/collections/{self.id}/documents/{document_id}" + response = self.client.get(url) + response.raise_for_status() + data = response.json() + return Document( + collection_id=self.id, + id=data["id"], + metadata=data["metadata"], + content=data["content"], + created_at=data["created_at"] + ) + + def get_documents(self, page: int = 1, page_size: int = 50) -> GetDocumentPage: + """Get all documents for a collection + + Args: + page (int, optional): The page of results to return + page_size (int, optional): The number of results to return per page + + Returns: + GetDocumentPage: Page of Document objects + + """ + url = f"{self.common_prefix}/users/{self.user_id}/collections/{self.id}/documents?page={page}&size={page_size}" + response = self.client.get(url) + response.raise_for_status() + data = response.json() + return GetDocumentPage(self, data) + + def get_documents_generator(self): + """Shortcut Generator for get_documents. Generator to iterate through all documents for a collection in an app + + Yields: + Document: The Document object of the next Document + + """ + page = 1 + page_size = 50 + get_documents_page= self.get_documents(page, page_size) + while True: + for document in get_documents_page.items: + yield document + + new_documents = get_documents_page.next() + if not new_documents: + break + + get_documents_page = new_documents + + def query(self, query: str, top_k: int = 5) -> List[Document]: + """query the documents by cosine distance + Args: + query (str): The query string to compare other embeddings too + top_k (int, optional): The number of results to return. 
Defaults to 5 max 50 + + Returns: + List[Document]: The response from the query with matching documents + """ + url = f"{self.common_prefix}/users/{self.user_id}/collections/{self.id}/query?query={query}&top_k={top_k}" + response = self.client.get(url) + response.raise_for_status() + data = [ + Document( + collection_id=self.id, + content=document["content"], + id=document["id"], + created_at=document["created_at"], + metadata=document["metadata"] + ) + for document in response.json() + ] + return data + + def update_document(self, document: Document, content: Optional[str], metadata: Optional[Dict]) -> Document: + """Update a document in the collection + + Args: + document (Document): The Document to update + metadata (Dict): The metadata of the document + content (str): The content of the document + + Returns: + Document: The newly updated Document + """ + if metadata is None and content is None: + raise ValueError("metadata and content cannot both be None") + data = {"metadata": metadata, "content": content} + url = f"{self.common_prefix}/users/{self.user_id}/collections/{self.id}/documents/{document.id}" + response = self.client.put(url, json=data) + response.raise_for_status() + data = response.json() + return Document( + data["id"], + metadata=data["metadata"], + content=data["content"], + created_at=data["created_at"], + collection_id=data["collection_id"], + ) + + def delete_document(self, document: Document) -> bool: + """Delete a document from the collection + + Args: + document (Document): The Document to delete + + Returns: + boolean: Whether the document was successfully deleted + """ + url = f"{self.common_prefix}/users/{self.user_id}/collections/{self.id}/documents/{document.id}" + response = self.client.delete(url) + response.raise_for_status() + success = response.status_code < 400 + return success diff --git a/sdk/pyproject.toml b/sdk/pyproject.toml index ff695b5..1455bca 100644 --- a/sdk/pyproject.toml +++ b/sdk/pyproject.toml @@ -1,6 +1,6 @@ 
[tool.poetry] name = "honcho-ai" -version = "0.0.2" +version = "0.0.3" description = "Python Client SDK for Honcho" authors = ["Plastic Labs "] license = "AGPL-3.0" diff --git a/sdk/tests/test_async.py b/sdk/tests/test_async.py index 8e5904d..f47942b 100644 --- a/sdk/tests/test_async.py +++ b/sdk/tests/test_async.py @@ -1,5 +1,5 @@ import pytest -from honcho import AsyncGetSessionPage, AsyncGetMessagePage, AsyncGetMetamessagePage, AsyncSession, Message, Metamessage +from honcho import AsyncGetSessionPage, AsyncGetMessagePage, AsyncGetMetamessagePage, AsyncGetDocumentPage, AsyncSession, Message, Metamessage, Document from honcho import AsyncClient as Honcho from uuid import uuid1 @@ -14,7 +14,7 @@ async def test_session_creation_retrieval(): assert retrieved_session.id == created_session.id assert retrieved_session.is_active is True assert retrieved_session.location_id == "default" - assert retrieved_session.session_data == {} + assert retrieved_session.metadata == {} @pytest.mark.asyncio @@ -40,7 +40,7 @@ async def test_session_update(): created_session = await client.create_session(user_id) assert await created_session.update({"foo": "bar"}) retrieved_session = await client.get_session(user_id, created_session.id) - assert retrieved_session.session_data == {"foo": "bar"} + assert retrieved_session.metadata == {"foo": "bar"} @pytest.mark.asyncio @@ -271,4 +271,98 @@ async def test_paginated_metamessages_generator(): await gen.__anext__() +@pytest.mark.asyncio +async def test_collections(): + col_name = str(uuid1()) + app_id = str(uuid1()) + user_id = str(uuid1()) + client = Honcho(app_id, "http://localhost:8000") + # Make a collection + collection = await client.create_collection(user_id, col_name) + + # Add documents + doc1 = await collection.create_document(content="This is a test of documents - 1", metadata={"foo": "bar"}) + doc2 = await collection.create_document(content="This is a test of documents - 2", metadata={}) + doc3 = await 
collection.create_document(content="This is a test of documents - 3", metadata={}) + + # Get all documents + page = await collection.get_documents(page=1, page_size=3) + # Verify size + assert page is not None + assert isinstance(page, AsyncGetDocumentPage) + assert len(page.items) == 3 + # delete a doc + result = await collection.delete_document(doc1) + assert result is True + # Get all documents with a generator this time + gen = collection.get_documents_generator() + # Verfy size + item = await gen.__anext__() + item2 = await gen.__anext__() + with pytest.raises(StopAsyncIteration): + await gen.__anext__() + # delete the collection + result = await collection.delete() + # confirm documents are gone + with pytest.raises(Exception): + new_col = await client.get_collection(user_id, "test") + +@pytest.mark.asyncio +async def test_collection_name_collision(): + col_name = str(uuid1()) + new_col_name = str(uuid1()) + app_id = str(uuid1()) + user_id = str(uuid1()) + client = Honcho(app_id, "http://localhost:8000") + # Make a collection + collection = await client.create_collection(user_id, col_name) + # Make another collection + with pytest.raises(Exception): + await client.create_collection(user_id, col_name) + + # Change the name of original collection + result = await collection.update(new_col_name) + assert result is True + + # Try again to add another collection + collection2 = await client.create_collection(user_id, col_name) + assert collection2 is not None + assert collection2.name == col_name + assert collection.name == new_col_name + + # Get all collections + page = await client.get_collections(user_id) + assert page is not None + assert len(page.items) == 2 + +@pytest.mark.asyncio +async def test_collection_query(): + col_name = str(uuid1()) + app_id = str(uuid1()) + user_id = str(uuid1()) + client = Honcho(app_id, "http://localhost:8000") + # Make a collection + collection = await client.create_collection(user_id, col_name) + + # Add documents + doc1 = 
await collection.create_document(content="The user loves puppies", metadata={}) + doc2 = await collection.create_document(content="The user owns a dog", metadata={}) + doc3 = await collection.create_document(content="The user is a doctor", metadata={}) + + result = await collection.query(query="does the user own pets", top_k=2) + + assert result is not None + assert len(result) == 2 + assert isinstance(result[0], Document) + + doc3 = await collection.update_document(doc3, metadata={"test": "test"}, content="the user has owned pets in the past") + assert doc3 is not None + assert doc3.metadata == {"test": "test"} + assert doc3.content == "the user has owned pets in the past" + + result = await collection.query(query="does the user own pets", top_k=2) + + assert result is not None + assert len(result) == 2 + assert isinstance(result[0], Document) diff --git a/sdk/tests/test_sync.py b/sdk/tests/test_sync.py index 135cc61..a0367ad 100644 --- a/sdk/tests/test_sync.py +++ b/sdk/tests/test_sync.py @@ -1,7 +1,8 @@ -from honcho import GetSessionPage, GetMessagePage, GetMetamessagePage, Session, Message, Metamessage +import pytest +from honcho import GetSessionPage, GetMessagePage, GetMetamessagePage, GetDocumentPage, Session, Message, Metamessage, Document from honcho import Client as Honcho from uuid import uuid1 -import pytest + def test_session_creation_retrieval(): app_id = str(uuid1()) @@ -12,7 +13,7 @@ def test_session_creation_retrieval(): assert retrieved_session.id == created_session.id assert retrieved_session.is_active is True assert retrieved_session.location_id == "default" - assert retrieved_session.session_data == {} + assert retrieved_session.metadata == {} def test_session_multiple_retrieval(): @@ -30,18 +31,18 @@ def test_session_multiple_retrieval(): def test_session_update(): - app_id = str(uuid1()) user_id = str(uuid1()) + app_id = str(uuid1()) client = Honcho(app_id, "http://localhost:8000") created_session = client.create_session(user_id) assert 
created_session.update({"foo": "bar"}) retrieved_session = client.get_session(user_id, created_session.id) - assert retrieved_session.session_data == {"foo": "bar"} + assert retrieved_session.metadata == {"foo": "bar"} def test_session_deletion(): - app_id = str(uuid1()) user_id = str(uuid1()) + app_id = str(uuid1()) client = Honcho(app_id, "http://localhost:8000") created_session = client.create_session(user_id) assert created_session.is_active is True @@ -53,8 +54,8 @@ def test_session_deletion(): def test_messages(): - app_id = str(uuid1()) user_id = str(uuid1()) + app_id = str(uuid1()) client = Honcho(app_id, "http://localhost:8000") created_session = client.create_session(user_id) created_session.create_message(is_user=True, content="Hello") @@ -128,13 +129,13 @@ def test_paginated_sessions_generator(): gen = client.get_sessions_generator(user_id) # print(type(gen)) - item = next(gen) + item = gen.__next__() assert item.user_id == user_id assert isinstance(item, Session) - assert next(gen) is not None - assert next(gen) is not None + assert gen.__next__() is not None + assert gen.__next__() is not None with pytest.raises(StopIteration): - next(gen) + gen.__next__() def test_paginated_out_of_bounds(): app_id = str(uuid1()) @@ -183,7 +184,6 @@ def test_paginated_messages(): assert next_page is None - def test_paginated_messages_generator(): app_id = str(uuid1()) user_id = str(uuid1()) @@ -193,17 +193,16 @@ def test_paginated_messages_generator(): created_session.create_message(is_user=False, content="Hi") gen = created_session.get_messages_generator() - item = next(gen) + item = gen.__next__() assert isinstance(item, Message) assert item.content == "Hello" assert item.is_user is True - item2 = next(gen) + item2 = gen.__next__() assert item2 is not None assert item2.content == "Hi" assert item2.is_user is False with pytest.raises(StopIteration): - next(gen) - + gen.__next__() def test_paginated_metamessages(): app_id = str(uuid1()) @@ -246,16 +245,107 @@ def 
test_paginated_metamessages_generator(): created_session.create_metamessage(message=message, metamessage_type="thought", content="Test 2") gen = created_session.get_metamessages_generator() - item = next(gen) + item = gen.__next__() assert isinstance(item, Metamessage) assert item.content == "Test 1" assert item.metamessage_type == "thought" - item2 = next(gen) + item2 = gen.__next__() assert item2 is not None assert item2.content == "Test 2" assert item2.metamessage_type == "thought" with pytest.raises(StopIteration): - next(gen) + gen.__next__() + + +def test_collections(): + col_name = str(uuid1()) + app_id = str(uuid1()) + user_id = str(uuid1()) + client = Honcho(app_id, "http://localhost:8000") + # Make a collection + collection = client.create_collection(user_id, col_name) + + # Add documents + doc1 = collection.create_document(content="This is a test of documents - 1", metadata={"foo": "bar"}) + doc2 = collection.create_document(content="This is a test of documents - 2", metadata={}) + doc3 = collection.create_document(content="This is a test of documents - 3", metadata={}) + + # Get all documents + page = collection.get_documents(page=1, page_size=3) + # Verify size + assert page is not None + assert isinstance(page, GetDocumentPage) + assert len(page.items) == 3 + # delete a doc + result = collection.delete_document(doc1) + assert result is True + # Get all documents with a generator this time + gen = collection.get_documents_generator() + # Verfy size + item = gen.__next__() + item2 = gen.__next__() + with pytest.raises(StopIteration): + gen.__next__() + # delete the collection + result = collection.delete() + # confirm documents are gone + with pytest.raises(Exception): + new_col = client.get_collection(user_id, "test") + +def test_collection_name_collision(): + col_name = str(uuid1()) + new_col_name = str(uuid1()) + app_id = str(uuid1()) + user_id = str(uuid1()) + client = Honcho(app_id, "http://localhost:8000") + # Make a collection + collection = 
client.create_collection(user_id, col_name) + # Make another collection + with pytest.raises(Exception): + client.create_collection(user_id, col_name) + + # Change the name of original collection + result = collection.update(new_col_name) + assert result is True + + # Try again to add another collection + collection2 = client.create_collection(user_id, col_name) + assert collection2 is not None + assert collection2.name == col_name + assert collection.name == new_col_name + + # Get all collections + page = client.get_collections(user_id) + assert page is not None + assert len(page.items) == 2 + +def test_collection_query(): + col_name = str(uuid1()) + app_id = str(uuid1()) + user_id = str(uuid1()) + client = Honcho(app_id, "http://localhost:8000") + # Make a collection + collection = client.create_collection(user_id, col_name) + + # Add documents + doc1 = collection.create_document(content="The user loves puppies", metadata={}) + doc2 = collection.create_document(content="The user owns a dog", metadata={}) + doc3 = collection.create_document(content="The user is a doctor", metadata={}) + + result = collection.query(query="does the user own pets", top_k=2) + + assert result is not None + assert len(result) == 2 + assert isinstance(result[0], Document) + + doc3 = collection.update_document(doc3, metadata={"test": "test"}, content="the user has owned pets in the past") + assert doc3 is not None + assert doc3.metadata == {"test": "test"} + assert doc3.content == "the user has owned pets in the past" + result = collection.query(query="does the user own pets", top_k=2) + assert result is not None + assert len(result) == 2 + assert isinstance(result[0], Document)