Skip to content

Commit

Permalink
Removed Message from llmclient.
Browse files Browse the repository at this point in the history
It lives in aviary now
  • Loading branch information
maykcaldas committed Dec 6, 2024
1 parent 848166e commit 5dde125
Show file tree
Hide file tree
Showing 11 changed files with 66 additions and 386 deletions.
10 changes: 0 additions & 10 deletions llmclient/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -7,20 +7,14 @@
)
from .exceptions import (
JSONSchemaValidationError,
MalformedMessageError,
)
from .llms import (
LiteLLMModel,
LLMModel,
MultipleCompletionLLMModel,
)
from .messages import (
Message,
)
from .types import LLMResult
from .utils import (
encode_image_to_base64,
is_coroutine_callable,
setup_default_logs,
)

Expand All @@ -32,12 +26,8 @@
"LLMModel",
"LLMResult",
"LiteLLMModel",
"MalformedMessageError",
"Message",
"MultipleCompletionLLMModel",
"SentenceTransformerEmbeddingModel",
"SparseEmbeddingModel",
"encode_image_to_base64",
"is_coroutine_callable",
"setup_default_logs",
]
20 changes: 0 additions & 20 deletions llmclient/exceptions.py
Original file line number Diff line number Diff line change
@@ -1,22 +1,2 @@
from logging import LogRecord

from llmclient.messages import EMPTY_CONTENT_BASE_MSG


class JSONSchemaValidationError(ValueError):
    """Signals that an LLM completion's output failed validation against the requested JSON schema."""


class MalformedMessageError(ValueError):
    """Error to throw if some aspect of a Message variant is malformed."""

    @classmethod
    def common_retryable_errors_log_filter(cls, record: LogRecord) -> bool:
        """
        Filter out common parsing failures not worth looking into from logs.

        Returns:
            False if the LogRecord should be filtered out, otherwise True to keep it.
        """
        # A record is considered known noise only when it mentions BOTH this
        # exception type's name AND the empty-content marker text — matching
        # on both keeps the filter robust against unrelated messages.
        mentions_class = cls.__name__ in record.msg
        mentions_empty_content = EMPTY_CONTENT_BASE_MSG in record.msg
        return not (mentions_class and mentions_empty_content)
12 changes: 9 additions & 3 deletions llmclient/llms.py
Original file line number Diff line number Diff line change
Expand Up @@ -22,7 +22,14 @@
)

import litellm
from aviary.tools import Tool, ToolRequestMessage, ToolsAdapter, ToolSelector
from aviary.core import (
Message,
Tool,
ToolRequestMessage,
ToolsAdapter,
ToolSelector,
is_coroutine_callable,
)
from pydantic import (
BaseModel,
ConfigDict,
Expand All @@ -39,11 +46,10 @@
IS_PYTHON_BELOW_312,
)
from llmclient.exceptions import JSONSchemaValidationError
from llmclient.messages import Message
from llmclient.prompts import default_system_prompt
from llmclient.rate_limiter import GLOBAL_LIMITER
from llmclient.types import Chunk, LLMResult
from llmclient.utils import get_litellm_retrying_config, is_coroutine_callable
from llmclient.utils import get_litellm_retrying_config

logger = logging.getLogger(__name__)

Expand Down
148 changes: 0 additions & 148 deletions llmclient/messages.py

This file was deleted.

3 changes: 1 addition & 2 deletions llmclient/types.py
Original file line number Diff line number Diff line change
Expand Up @@ -5,15 +5,14 @@
from uuid import UUID, uuid4

import litellm
from aviary.core import Message
from pydantic import (
BaseModel,
ConfigDict,
Field,
computed_field,
)

from llmclient.messages import Message

logger = logging.getLogger(__name__)

# A context var that will be unique to threads/processes
Expand Down
31 changes: 1 addition & 30 deletions llmclient/utils.py
Original file line number Diff line number Diff line change
@@ -1,14 +1,11 @@
import base64
import contextlib
import io
import logging
import logging.config
from collections.abc import Callable
from inspect import iscoroutinefunction, isfunction, ismethod, signature
from inspect import signature
from typing import Any

import litellm
import numpy as np
import pymupdf


def get_litellm_retrying_config(timeout: float = 60.0) -> dict[str, Any]:
    """Build a LiteLLM retry configuration: 3 retries within `timeout` seconds."""
    retry_config: dict[str, Any] = {"num_retries": 3, "timeout": timeout}
    return retry_config


def encode_image_to_base64(img: "np.ndarray") -> str:
    """Encode an image to a base64 string, to be included as an image_url in a Message."""
    # Pillow is an optional dependency; import lazily and fail with install guidance.
    try:
        from PIL import Image
    except ImportError as exc:
        raise ImportError(
            "Image processing requires the 'image' extra for 'Pillow'. Please:"
            " `pip install fh-llm-client[image]`."
        ) from exc

    # Render the array to an in-memory PNG, then wrap it as a data URL.
    png_buffer = io.BytesIO()
    Image.fromarray(img).save(png_buffer, format="PNG")
    payload = base64.b64encode(png_buffer.getvalue()).decode("utf-8")
    return f"data:image/png;base64,{payload}"


def prepare_args(
func: Callable, chunk: str, name: str | None = None
) -> tuple[tuple, dict]:
Expand All @@ -44,14 +23,6 @@ def prepare_args(
return (chunk,), {}


def is_coroutine_callable(obj) -> bool:
    """Return True if calling `obj` would produce a coroutine (i.e. it is async-callable).

    Handles plain functions/methods directly, and callable instances by
    inspecting their `__call__` method.
    """
    if isfunction(obj) or ismethod(obj):
        return iscoroutinefunction(obj)
    # Guard clause instead of `elif` after `return` (resolves the RET505 lint
    # the original suppressed with `noqa`); behavior is unchanged.
    if callable(obj):
        return iscoroutinefunction(obj.__call__)
    return False


def partial_format(value: str, **formats: dict[str, Any]) -> str:
"""Partially format a string given a variable amount of formats."""
for template_key, template_value in formats.items():
Expand Down
2 changes: 1 addition & 1 deletion pyproject.toml
Original file line number Diff line number Diff line change
Expand Up @@ -26,7 +26,7 @@ dependencies = [
"aiofiles",
"coredis",
"dm-tree",
"fhaviary>=0.8.2", # For core namespace
"fhaviary @ /Users/maykcaldas/Documents/FutureHouse/aviary", # "fhaviary>=0.8.2", # For core namespace
"httpx",
"limits",
"litellm>=1.44", # For LITELLM_LOG addition
Expand Down
3 changes: 1 addition & 2 deletions tests/test_llms.py
Original file line number Diff line number Diff line change
Expand Up @@ -7,7 +7,7 @@
import litellm
import numpy as np
import pytest
from aviary.core import Tool, ToolRequestMessage
from aviary.core import Message, Tool, ToolRequestMessage
from pydantic import BaseModel, Field

from llmclient.exceptions import JSONSchemaValidationError
Expand All @@ -17,7 +17,6 @@
MultipleCompletionLLMModel,
validate_json_completion,
)
from llmclient.messages import Message
from llmclient.types import LLMResult
from tests.conftest import VCR_DEFAULT_MATCH_ON

Expand Down
Loading

0 comments on commit 5dde125

Please sign in to comment.