diff --git a/examples/models/groq_models.py b/examples/models/groq_models.py
index 228b5c1..515a068 100644
--- a/examples/models/groq_models.py
+++ b/examples/models/groq_models.py
@@ -18,7 +18,7 @@ def analyze(text: str) -> SentimentAnalysis:

if __name__ == "__main__":
    # set global llm
-    settings.llm = ChatGroq(model="llama3-70b-8192")
+    settings.llm = ChatGroq(model="llama3-70b-8192")  # type: ignore

    # run prompt
    result = analyze("I really like when my dog does a trick!")
diff --git a/pyproject.toml b/pyproject.toml
index 03984bf..a54bf84 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -1,6 +1,6 @@
[project]
name = "funcchain"
-version = "0.3.5"
+version = "0.3.6"
description = "🔖 write prompts as python functions"
authors = [{ name = "Shroominic", email = "contact@shroominic.com" }]
dependencies = [
diff --git a/requirements-dev.lock b/requirements-dev.lock
index fc8c55f..0826bcb 100644
--- a/requirements-dev.lock
+++ b/requirements-dev.lock
@@ -7,65 +7,68 @@
#   all-features: false
#   with-sources: false
#   generate-hashes: false
+#   universal: false

-e file:.
-aiohttp==3.9.5
+aiohappyeyeballs==2.4.3
+    # via aiohttp
+aiohttp==3.11.4
    # via langchain
    # via langchain-community
aiosignal==1.3.1
    # via aiohttp
annotated-types==0.7.0
    # via pydantic
-anthropic==0.28.1
+anthropic==0.39.0
    # via langchain-anthropic
-anyio==4.4.0
+anyio==4.6.2.post1
    # via anthropic
    # via groq
    # via httpx
    # via openai
asttokens==2.4.1
    # via stack-data
-attrs==23.2.0
+attrs==24.2.0
    # via aiohttp
-babel==2.15.0
+babel==2.16.0
    # via mkdocs-material
backoff==2.2.1
    # via unstructured
beautifulsoup4==4.12.3
    # via funcchain
    # via unstructured
-cachetools==5.3.3
+cachetools==5.5.0
    # via google-auth
-certifi==2024.6.2
+certifi==2024.8.30
    # via httpcore
    # via httpx
    # via requests
-    # via unstructured-client
+cffi==1.17.1
+    # via cryptography
cfgv==3.4.0
    # via pre-commit
chardet==5.2.0
    # via unstructured
-charset-normalizer==3.3.2
+charset-normalizer==3.4.0
    # via requests
-    # via unstructured-client
click==8.1.7
    # via mkdocs
    # via nltk
+    # via python-oxmsg
colorama==0.4.6
    # via mkdocs-material
+cryptography==43.0.3
+    # via unstructured-client
dataclasses-json==0.6.7
    # via langchain-community
    # via unstructured
-    # via unstructured-client
decorator==5.1.1
    # via ipython
-deepdiff==7.0.1
-    # via unstructured-client
defusedxml==0.7.1
    # via langchain-anthropic
diskcache==5.6.3
    # via llama-cpp-python
-distlib==0.3.8
+distlib==0.3.9
    # via virtualenv
distro==1.9.0
    # via anthropic
@@ -73,33 +76,35 @@ distro==1.9.0
    # via openai
docstring-parser==0.16
    # via funcchain
-emoji==2.12.1
+emoji==2.14.0
    # via unstructured
-executing==2.0.1
+eval-type-backport==0.2.0
+    # via unstructured-client
+executing==2.1.0
    # via stack-data
-faiss-cpu==1.8.0
+faiss-cpu==1.9.0
    # via funcchain
-filelock==3.15.1
+filelock==3.16.1
    # via huggingface-hub
    # via virtualenv
filetype==1.2.0
    # via unstructured
-frozenlist==1.4.1
+frozenlist==1.5.0
    # via aiohttp
    # via aiosignal
-fsspec==2024.6.0
+fsspec==2024.10.0
    # via huggingface-hub
ghp-import==2.1.0
    # via mkdocs
-google-ai-generativelanguage==0.6.4
+google-ai-generativelanguage==0.6.10
    # via google-generativeai
-google-api-core==2.19.0
+google-api-core==2.23.0
    # via google-ai-generativelanguage
    # via google-api-python-client
    # via google-generativeai
-google-api-python-client==2.133.0
+google-api-python-client==2.153.0
    # via google-generativeai
-google-auth==2.30.0
+google-auth==2.36.0
    # via google-ai-generativelanguage
    # via google-api-core
    # via google-api-python-client
@@ -107,54 +112,58 @@ google-auth==2.30.0
    # via google-generativeai
google-auth-httplib2==0.2.0
    # via google-api-python-client
-google-generativeai==0.5.4
+google-generativeai==0.8.3
    # via langchain-google-genai
-googleapis-common-protos==1.63.1
+googleapis-common-protos==1.66.0
    # via google-api-core
    # via grpcio-status
-groq==0.9.0
+groq==0.12.0
    # via langchain-groq
-grpcio==1.64.1
+grpcio==1.68.0
    # via google-api-core
    # via grpcio-status
-grpcio-status==1.62.2
+grpcio-status==1.68.0
    # via google-api-core
h11==0.14.0
    # via httpcore
-httpcore==1.0.5
+html5lib==1.1
+    # via unstructured
+httpcore==1.0.7
    # via httpx
httplib2==0.22.0
    # via google-api-python-client
    # via google-auth-httplib2
-httpx==0.27.0
+httpx==0.27.2
    # via anthropic
    # via groq
+    # via langsmith
    # via openai
    # via unstructured-client
-huggingface-hub==0.23.4
+httpx-sse==0.4.0
+    # via langchain-community
+huggingface-hub==0.26.2
    # via funcchain
-    # via tokenizers
-identify==2.5.36
+identify==2.6.2
    # via pre-commit
-idna==3.7
+idna==3.10
    # via anyio
    # via httpx
    # via requests
-    # via unstructured-client
    # via yarl
iniconfig==2.0.0
    # via pytest
-ipython==8.25.0
+ipython==8.29.0
isort==5.13.2
-jedi==0.19.1
+jedi==0.19.2
    # via ipython
jinja2==3.1.4
    # via funcchain
    # via llama-cpp-python
    # via mkdocs
    # via mkdocs-material
-jiter==0.4.2
+jiter==0.7.1
    # via anthropic
+    # via openai
joblib==1.4.2
    # via nltk
jsonpatch==1.33
@@ -163,14 +172,14 @@ jsonpath-python==1.0.6
    # via unstructured-client
jsonpointer==3.0.0
    # via jsonpatch
-langchain==0.2.5
+langchain==0.3.7
    # via funcchain
    # via langchain-community
-langchain-anthropic==0.1.15
+langchain-anthropic==0.3.0
    # via funcchain
-langchain-community==0.2.5
+langchain-community==0.3.7
    # via funcchain
-langchain-core==0.2.7
+langchain-core==0.3.19
    # via funcchain
    # via langchain
    # via langchain-anthropic
@@ -179,36 +188,35 @@ langchain-core==0.2.7
    # via langchain-community
    # via langchain-google-genai
    # via langchain-groq
    # via langchain-openai
    # via langchain-text-splitters
-langchain-google-genai==1.0.6
+langchain-google-genai==2.0.4
    # via funcchain
-langchain-groq==0.1.5
+langchain-groq==0.2.1
    # via funcchain
-langchain-openai==0.1.8
+langchain-openai==0.2.9
    # via funcchain
-langchain-text-splitters==0.2.1
+langchain-text-splitters==0.3.2
    # via langchain
langdetect==1.0.9
    # via unstructured
-langsmith==0.1.77
+langsmith==0.1.143
    # via langchain
    # via langchain-community
    # via langchain-core
-llama-cpp-python==0.2.78
+llama-cpp-python==0.3.2
    # via funcchain
-lxml==5.2.2
+lxml==5.3.0
    # via unstructured
-markdown==3.6
+markdown==3.7
    # via mkdocs
    # via mkdocs-material
    # via pymdown-extensions
markdown-it-py==3.0.0
    # via rich
-markupsafe==2.1.5
+markupsafe==3.0.2
    # via jinja2
    # via mkdocs
-marshmallow==3.21.3
+marshmallow==3.23.1
    # via dataclasses-json
-    # via unstructured-client
matplotlib-inline==0.1.7
    # via ipython
mdurl==0.1.2
@@ -216,24 +224,23 @@ mdurl==0.1.2
mergedeep==1.3.4
    # via mkdocs
    # via mkdocs-get-deps
-mkdocs==1.6.0
+mkdocs==1.6.1
    # via mkdocs-material
mkdocs-get-deps==0.2.0
    # via mkdocs
-mkdocs-material==9.5.26
+mkdocs-material==9.5.44
mkdocs-material-extensions==1.3.1
    # via mkdocs-material
-multidict==6.0.5
+multidict==6.1.0
    # via aiohttp
    # via yarl
-mypy==1.10.0
+mypy==1.13.0
mypy-extensions==1.0.0
    # via mypy
    # via typing-inspect
-    # via unstructured-client
nest-asyncio==1.6.0
    # via unstructured-client
-nltk==3.8.1
+nltk==3.9.1
    # via unstructured
nodeenv==1.9.1
    # via pre-commit
@@ -243,20 +250,20 @@ numpy==1.26.4
    # via langchain
    # via langchain-community
    # via llama-cpp-python
    # via unstructured
-openai==1.34.0
+olefile==0.47
+    # via python-oxmsg
+openai==1.54.4
    # via langchain-openai
-ordered-set==4.1.0
-    # via deepdiff
-orjson==3.10.5
+orjson==3.10.11
    # via langsmith
-packaging==24.1
+packaging==24.2
+    # via faiss-cpu
    # via huggingface-hub
    # via langchain-core
    # via marshmallow
    # via mkdocs
    # via pytest
-    # via unstructured-client
-paginate==0.5.6
+paginate==0.5.7
    # via mkdocs-material
parso==0.8.4
    # via jedi
@@ -268,72 +275,85 @@ pdfminer==20191125
    # via funcchain
pexpect==4.9.0
    # via ipython
-pillow==10.3.0
+pillow==11.0.0
    # via funcchain
    # via pdf2image
-platformdirs==4.2.2
+platformdirs==4.3.6
    # via mkdocs-get-deps
    # via virtualenv
pluggy==1.5.0
    # via pytest
-pre-commit==3.7.1
-prompt-toolkit==3.0.47
+pre-commit==4.0.1
+prompt-toolkit==3.0.48
    # via ipython
-proto-plus==1.23.0
+propcache==0.2.0
+    # via aiohttp
+    # via yarl
+proto-plus==1.25.0
    # via google-ai-generativelanguage
    # via google-api-core
-protobuf==4.25.3
+protobuf==5.28.3
    # via google-ai-generativelanguage
    # via google-api-core
    # via google-generativeai
    # via googleapis-common-protos
    # via grpcio-status
    # via proto-plus
+psutil==6.1.0
+    # via unstructured
ptyprocess==0.7.0
    # via pexpect
-pure-eval==0.2.2
+pure-eval==0.2.3
    # via stack-data
-pyasn1==0.6.0
+pyasn1==0.6.1
    # via pyasn1-modules
    # via rsa
-pyasn1-modules==0.4.0
+pyasn1-modules==0.4.1
    # via google-auth
-pycryptodome==3.20.0
+pycparser==2.22
+    # via cffi
+pycryptodome==3.21.0
    # via pdfminer
-pydantic==2.7.4
+pydantic==2.9.2
    # via anthropic
    # via google-generativeai
    # via groq
    # via langchain
+    # via langchain-anthropic
    # via langchain-core
+    # via langchain-google-genai
    # via langsmith
    # via openai
    # via pydantic-settings
-pydantic-core==2.18.4
+    # via unstructured-client
+pydantic-core==2.23.4
    # via pydantic
-pydantic-settings==2.3.3
+pydantic-settings==2.6.1
    # via funcchain
+    # via langchain-community
pygments==2.18.0
    # via ipython
    # via mkdocs-material
    # via rich
-pymdown-extensions==10.8.1
+pymdown-extensions==10.12
    # via mkdocs-material
-pyparsing==3.1.2
+pyparsing==3.2.0
    # via httplib2
-pypdf==4.2.0
+pypdf==5.1.0
    # via unstructured-client
-pytest==8.2.2
-python-dateutil==2.9.0.post0
+pytest==8.3.3
+python-dateutil==2.8.2
    # via ghp-import
    # via unstructured-client
python-dotenv==1.0.1
    # via pydantic-settings
-python-iso639==2024.4.27
+python-iso639==2024.10.22
    # via unstructured
python-magic==0.4.27
    # via unstructured
-pyyaml==6.0.1
+python-oxmsg==0.0.1
+    # via unstructured
+pyyaml==6.0.2
    # via huggingface-hub
    # via langchain
    # via langchain-community
@@ -345,9 +365,9 @@ pyyaml==6.0.1
    # via mkdocs
    # via mkdocs-get-deps
    # via pre-commit
    # via pymdown-extensions
    # via pyyaml-env-tag
pyyaml-env-tag==0.1
    # via mkdocs
-rapidfuzz==3.9.3
+rapidfuzz==3.10.1
    # via unstructured
-regex==2024.5.15
+regex==2024.11.6
    # via mkdocs-material
    # via nltk
    # via tiktoken
@@ -361,42 +381,38 @@ requests==2.32.3
    # via requests-toolbelt
    # via tiktoken
    # via unstructured
-    # via unstructured-client
requests-toolbelt==1.0.0
+    # via langsmith
    # via unstructured-client
-rich==13.7.1
+rich==13.9.4
rsa==4.9
    # via google-auth
-ruff==0.4.9
+ruff==0.7.4
six==1.16.0
    # via asttokens
+    # via html5lib
    # via langdetect
    # via python-dateutil
-    # via unstructured-client
sniffio==1.3.1
    # via anthropic
    # via anyio
    # via groq
    # via httpx
    # via openai
-soupsieve==2.5
+soupsieve==2.6
    # via beautifulsoup4
-sqlalchemy==2.0.30
+sqlalchemy==2.0.35
    # via langchain
    # via langchain-community
stack-data==0.6.3
    # via ipython
-tabulate==0.9.0
-    # via unstructured
-tenacity==8.3.0
+tenacity==9.0.0
    # via langchain
    # via langchain-community
    # via langchain-core
-tiktoken==0.7.0
+tiktoken==0.8.0
    # via langchain-openai
-tokenizers==0.19.1
-    # via anthropic
-tqdm==4.66.4
+tqdm==4.67.0
    # via google-generativeai
    # via huggingface-hub
    # via nltk
@@ -405,42 +421,42 @@ tqdm==4.66.4
traitlets==5.14.3
    # via ipython
    # via matplotlib-inline
-types-pyyaml==6.0.12.20240311
+types-pyyaml==6.0.12.20240917
typing-extensions==4.12.2
    # via anthropic
-    # via emoji
    # via google-generativeai
    # via groq
    # via huggingface-hub
-    # via ipython
+    # via langchain-core
    # via llama-cpp-python
    # via mypy
    # via openai
    # via pydantic
    # via pydantic-core
+    # via python-oxmsg
    # via sqlalchemy
    # via typing-inspect
    # via unstructured
-    # via unstructured-client
typing-inspect==0.9.0
    # via dataclasses-json
    # via unstructured-client
-unstructured==0.14.6
+unstructured==0.16.5
    # via funcchain
-unstructured-client==0.23.5
+unstructured-client==0.27.0
    # via unstructured
uritemplate==4.1.1
    # via google-api-python-client
-urllib3==2.2.1
+urllib3==2.2.3
    # via requests
-    # via unstructured-client
-virtualenv==20.26.2
+virtualenv==20.27.1
    # via pre-commit
-watchdog==4.0.1
+watchdog==6.0.0
    # via mkdocs
wcwidth==0.2.13
    # via prompt-toolkit
+webencodings==0.5.1
+    # via html5lib
wrapt==1.16.0
    # via unstructured
-yarl==1.9.4
+yarl==1.17.2
    # via aiohttp
diff --git a/requirements.lock b/requirements.lock
index 4abc699..1d6014a 100644
--- a/requirements.lock
+++ b/requirements.lock
@@ -7,18 +7,19 @@
#   all-features: false
#   with-sources: false
#   generate-hashes: false
+#   universal: false

-e file:.
annotated-types==0.7.0
    # via pydantic
-anyio==4.4.0
+anyio==4.6.2.post1
    # via httpx
    # via openai
-certifi==2024.6.2
+certifi==2024.8.30
    # via httpcore
    # via httpx
    # via requests
-charset-normalizer==3.3.2
+charset-normalizer==3.4.0
    # via requests
distro==1.9.0
    # via openai
@@ -26,66 +27,73 @@ docstring-parser==0.16
    # via funcchain
h11==0.14.0
    # via httpcore
-httpcore==1.0.5
+httpcore==1.0.7
    # via httpx
-httpx==0.27.0
+httpx==0.27.2
+    # via langsmith
    # via openai
-idna==3.7
+idna==3.10
    # via anyio
    # via httpx
    # via requests
jinja2==3.1.4
    # via funcchain
+jiter==0.7.1
+    # via openai
jsonpatch==1.33
    # via langchain-core
jsonpointer==3.0.0
    # via jsonpatch
-langchain-core==0.2.7
+langchain-core==0.3.19
    # via funcchain
    # via langchain-openai
-langchain-openai==0.1.8
+langchain-openai==0.2.9
    # via funcchain
-langsmith==0.1.77
+langsmith==0.1.143
    # via langchain-core
-markupsafe==2.1.5
+markupsafe==3.0.2
    # via jinja2
-openai==1.34.0
+openai==1.54.4
    # via langchain-openai
-orjson==3.10.5
+orjson==3.10.11
    # via langsmith
-packaging==24.1
+packaging==24.2
    # via langchain-core
-pydantic==2.7.4
+pydantic==2.9.2
    # via langchain-core
    # via langsmith
    # via openai
    # via pydantic-settings
-pydantic-core==2.18.4
+pydantic-core==2.23.4
    # via pydantic
-pydantic-settings==2.3.3
+pydantic-settings==2.6.1
    # via funcchain
python-dotenv==1.0.1
    # via pydantic-settings
-pyyaml==6.0.1
+pyyaml==6.0.2
    # via langchain-core
-regex==2024.5.15
+regex==2024.11.6
    # via tiktoken
requests==2.32.3
    # via langsmith
+    # via requests-toolbelt
    # via tiktoken
+requests-toolbelt==1.0.0
+    # via langsmith
sniffio==1.3.1
    # via anyio
    # via httpx
    # via openai
-tenacity==8.3.0
+tenacity==9.0.0
    # via langchain-core
-tiktoken==0.7.0
+tiktoken==0.8.0
    # via langchain-openai
-tqdm==4.66.4
+tqdm==4.67.0
    # via openai
typing-extensions==4.12.2
+    # via langchain-core
    # via openai
    # via pydantic
    # via pydantic-core
-urllib3==2.2.1
+urllib3==2.2.3
    # via requests
diff --git a/src/funcchain/backend/prompt.py b/src/funcchain/backend/prompt.py
index f951fc8..dab2cec 100644
--- a/src/funcchain/backend/prompt.py
+++ b/src/funcchain/backend/prompt.py
@@ -193,7 +193,7 @@ def from_template(
from_template( """ prompt = PromptTemplate.from_template( template, - template_format=template_format, + template_format=template_format, # type: ignore partial_variables=partial_variables, ) kwargs["images"] = images diff --git a/src/funcchain/model/patches/llamacpp.py b/src/funcchain/model/patches/llamacpp.py index 6e8fd94..3b678b1 100644 --- a/src/funcchain/model/patches/llamacpp.py +++ b/src/funcchain/model/patches/llamacpp.py @@ -15,9 +15,9 @@ SystemMessage, ) from langchain_core.outputs import ChatGeneration, ChatGenerationChunk, ChatResult -from langchain_core.pydantic_v1 import Field, root_validator from langchain_core.utils import get_pydantic_field_names from langchain_core.utils.utils import build_extra_kwargs +from pydantic import Field, model_validator logger = logging.getLogger(__name__) @@ -130,7 +130,7 @@ class _LlamaCppCommon(BaseLanguageModel): verbose: bool = False """Print verbose output to stderr.""" - @root_validator() + @model_validator(mode="before") def validate_environment(cls, values: Dict) -> Dict: """Validate that llama-cpp-python library is installed.""" try: @@ -187,7 +187,7 @@ def validate_environment(cls, values: Dict) -> Dict: pass return values - @root_validator(pre=True) + @model_validator(mode="before") def build_model_kwargs(cls, values: Dict[str, Any]) -> Dict[str, Any]: """Build extra kwargs from additional params that were passed in.""" all_required_field_names = get_pydantic_field_names(cls) diff --git a/src/funcchain/model/patches/ollama.py b/src/funcchain/model/patches/ollama.py index 31792e7..ade9aa3 100644 --- a/src/funcchain/model/patches/ollama.py +++ b/src/funcchain/model/patches/ollama.py @@ -3,7 +3,7 @@ import requests # type: ignore from langchain_core.messages import AIMessage, BaseMessage, HumanMessage, SystemMessage -from langchain_core.pydantic_v1 import validator +from pydantic import field_validator try: from langchain_community.chat_models import ChatOllama as _ChatOllama @@ -14,7 +14,7 @@ class ChatOllama(_ChatOllama): The [GBNF](https://github.com/ggerganov/llama.cpp/tree/master/grammars) grammar used to constrain the output. """ - @validator("grammar") + @field_validator("grammar") def _validate_grammar(cls, v: Optional[str]) -> Optional[str]: if v is not None and "root ::=" not in v: raise ValueError("Grammar must contain a root rule.") diff --git a/src/funcchain/parser/primitive_types.py b/src/funcchain/parser/primitive_types.py index 88b314f..3c176c9 100644 --- a/src/funcchain/parser/primitive_types.py +++ b/src/funcchain/parser/primitive_types.py @@ -2,7 +2,7 @@ Primitive Types Parser """ -from typing import Generic, TypeVar +from typing import Any, TypeVar from pydantic import BaseModel, create_model @@ -12,7 +12,7 @@ M = TypeVar("M", bound=BaseModel) -class RetryJsonPrimitiveTypeParser(RetryJsonPydanticParser, Generic[M]): +class RetryJsonPrimitiveTypeParser(RetryJsonPydanticParser): """ Parse primitve types by wrapping them in a PydanticModel and parsing them. Examples: int, float, bool, list[str], dict[str, int], Literal["a", "b", "c"], etc. 
@@ -30,7 +30,7 @@ def __init__( retry_llm=retry_llm, ) - def parse(self, text: str) -> M: + def parse(self, text: str) -> Any: return super().parse(text).value def get_format_instructions(self) -> str: diff --git a/src/funcchain/schema/signature.py b/src/funcchain/schema/signature.py index 92701a4..ea86fea 100644 --- a/src/funcchain/schema/signature.py +++ b/src/funcchain/schema/signature.py @@ -1,7 +1,7 @@ from typing import Any from langchain_core.messages import BaseMessage -from langchain_core.pydantic_v1 import BaseModel, Field +from pydantic import BaseModel, Field from ..backend.settings import FuncchainSettings, settings diff --git a/src/funcchain/syntax/components/router.py b/src/funcchain/syntax/components/router.py index 3174dee..7ca6338 100644 --- a/src/funcchain/syntax/components/router.py +++ b/src/funcchain/syntax/components/router.py @@ -71,7 +71,7 @@ def _selector(self) -> Runnable[dict[str, Any], Any]: from pydantic import BaseModel, Field class RouterModel(BaseModel): - selector: RouteChoices = Field( + selector: RouteChoices = Field( # type: ignore default="default", description="Enum of the available routes.", ) @@ -108,8 +108,8 @@ def _add_default_handler(self) -> None: def _routes_repr(self) -> str: return "\n".join([f"{route_name}: {route['description']}" for route_name, route in self.routes.items()]) - def invoke(self, input: HumanMessage, config: RunnableConfig | None = None) -> AIMessage: - return self.runnable.invoke(input, config=config) + def invoke(self, input: HumanMessage, config: RunnableConfig | None = None, **kwargs: Any | None) -> AIMessage: + return self.runnable.invoke(input, config, **kwargs) async def ainvoke(self, input: HumanMessage, config: RunnableConfig | None = None, **kwargs: Any) -> AIMessage: return await self.runnable.ainvoke(input, config, **kwargs) diff --git a/src/funcchain/utils/memory.py b/src/funcchain/utils/memory.py index 78452db..19bdcb9 100644 --- a/src/funcchain/utils/memory.py +++ b/src/funcchain/utils/memory.py @@ -2,7 +2,7 @@ from langchain_core.chat_history import BaseChatMessageHistory from langchain_core.messages import BaseMessage -from langchain_core.pydantic_v1 import BaseModel, Field +from pydantic import BaseModel, Field from ..schema.types import ChatHistoryFactory
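Note on the Pydantic migration in the patches above: dropping the langchain_core.pydantic_v1 shim means @root_validator() / @validator(...) become @model_validator(mode="before") / @field_validator(...). The snippet below is a minimal, self-contained sketch of that Pydantic v2 pattern, not code from this repository; GrammarConfig, extra_kwargs and _collect_extra_kwargs are hypothetical names chosen only to mirror the grammar check and the build_model_kwargs idea in the patched classes.

from typing import Any, Optional

from pydantic import BaseModel, Field, field_validator, model_validator


class GrammarConfig(BaseModel):
    """Hypothetical stand-in for the patched chat model settings."""

    grammar: Optional[str] = None
    extra_kwargs: dict[str, Any] = Field(default_factory=dict)

    # Pydantic v1: @validator("grammar")  ->  v2: @field_validator("grammar")
    @field_validator("grammar")
    @classmethod
    def _validate_grammar(cls, v: Optional[str]) -> Optional[str]:
        if v is not None and "root ::=" not in v:
            raise ValueError("Grammar must contain a root rule.")
        return v

    # Pydantic v1: @root_validator(pre=True)  ->  v2: @model_validator(mode="before")
    @model_validator(mode="before")
    @classmethod
    def _collect_extra_kwargs(cls, values: dict[str, Any]) -> dict[str, Any]:
        # Fold unknown keys into extra_kwargs instead of rejecting them,
        # similar in spirit to build_model_kwargs in the llama.cpp patch.
        unknown = {k: values.pop(k) for k in list(values) if k not in cls.model_fields}
        if unknown:
            values.setdefault("extra_kwargs", {}).update(unknown)
        return values


if __name__ == "__main__":
    cfg = GrammarConfig(grammar='root ::= "yes" | "no"', temperature=0.2)
    print(cfg.extra_kwargs)  # {'temperature': 0.2}

The mode="before" validator receives the raw keyword arguments before field validation, which is why it can pick up keys that are not declared fields; that is the behaviour the pre=True root validators relied on in Pydantic v1.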