Skip to content

Commit

Permalink
🚧 still wip: chat handler factory
Browse files Browse the repository at this point in the history
  • Loading branch information
shroominic committed Jun 15, 2024
1 parent 8fc4c09 commit f0b388f
Showing 1 changed file with 17 additions and 25 deletions.
42 changes: 17 additions & 25 deletions src/funcchain/syntax/components/handler.py
Original file line number Diff line number Diff line change
@@ -1,12 +1,11 @@
from typing import Any

from langchain_core.language_models import BaseChatModel
from langchain_core.prompts import ChatPromptTemplate, MessagesPlaceholder
from langchain_core.runnables.history import RunnableWithMessageHistory

from ...backend.settings import create_local_settings
from ...model.defaults import univeral_model_selector
from ...schema.types import ChatHandler, OptionalChatHistoryFactory, UniversalChatModel
from ...utils.memory import InMemoryChatMessageHistory, create_history_factory
from ...utils.msg_tools import msg_to_str
from ...schema.types import ChatRunnable, UniversalChatModel


def load_universal_llm(llm: UniversalChatModel) -> BaseChatModel:
Expand All @@ -21,28 +20,21 @@ def load_universal_llm(llm: UniversalChatModel) -> BaseChatModel:
def create_chat_handler(
    *,
    llm: UniversalChatModel = None,
    system_message: str | None = None,
    tools: list[str] | None = None,
    vision: bool = False,
    read_files: bool = False,
    read_links: bool = False,
    code_interpreter: bool = False,
    **kwargs: Any,
) -> ChatRunnable:
    """Create a chat runnable: (optional system msg + history) -> prompt -> llm.

    Args:
        llm: Universal model selector value; resolved via ``load_universal_llm``.
        system_message: Optional system prompt prepended to the conversation.
            Skipped entirely when ``None`` or empty.
        tools: Tool names to enable. ``None`` means no tools.
            NOTE(review): not wired up yet (WIP) — accepted for forward compat.
        vision: Enable image inputs. NOTE(review): not wired up yet (WIP).
        read_files: Enable file reading. NOTE(review): not wired up yet (WIP).
        read_links: Enable link reading. NOTE(review): not wired up yet (WIP).
        code_interpreter: Enable code execution. NOTE(review): not wired up yet (WIP).
        **kwargs: Reserved for future handler options; currently ignored.

    Returns:
        A runnable that maps an input message list to the model's response.
    """
    # Avoid a shared mutable default; normalize to a fresh list per call.
    tools = [] if tools is None else tools
    return (
        # Feed the raw input through under the "messages" key the prompt expects.
        {"messages": lambda x: x}
        | ChatPromptTemplate.from_messages(
            [
                # Only include a system turn when a non-empty message was given.
                *([("system", system_message)] if system_message else []),
                MessagesPlaceholder(variable_name="messages"),
            ]
        )
        | load_universal_llm(llm)  # type: ignore
    )

0 comments on commit f0b388f

Please sign in to comment.