From 6b04662952c8a157804c9d3653b7304eb8221ab5 Mon Sep 17 00:00:00 2001 From: Vineeth Voruganti <13438633+VVoruganti@users.noreply.github.com> Date: Wed, 20 Mar 2024 16:37:23 -0700 Subject: [PATCH] Sentry, OTEL, langchain both directions, fly.toml for deriver --- README.md | 19 ++++++++++-- api/fly.toml | 2 +- api/src/harvester.py | 9 ++++++ api/src/main.py | 13 ++++---- example/cli/main.py | 14 ++------- example/discord/honcho-dspy-personas/bot.py | 4 +-- example/discord/honcho-fact-memory/bot.py | 4 +-- example/discord/simple-roast-bot/main.py | 4 +-- sdk/honcho/ext/langchain.py | 33 +++++++++++++++++---- 9 files changed, 69 insertions(+), 33 deletions(-) diff --git a/README.md b/README.md index 9fb1338..30a5034 100644 --- a/README.md +++ b/README.md @@ -1,17 +1,30 @@ -# Honcho +# 🫡 Honcho ![Static Badge](https://img.shields.io/badge/Version-0.0.5-blue) [![Discord](https://img.shields.io/discord/1016845111637839922?style=flat&logo=discord&logoColor=23ffffff&label=Plastic%20Labs&labelColor=235865F2)](https://discord.gg/plasticlabs) ![GitHub License](https://img.shields.io/github/license/plastic-labs/honcho) ![GitHub Repo stars](https://img.shields.io/github/stars/plastic-labs/honcho) [![X (formerly Twitter) URL](https://img.shields.io/twitter/url?url=https%3A%2F%2Ftwitter.com%2Fplastic_labs)](https://twitter.com/plastic_labs) -A User context management solution for building AI Agents and LLM powered -applications. +Honcho is a platform for making AI agents and LLM powered applications that are personalized +to their end users. Read about the motivation of this project [here](https://blog.plasticlabs.ai). 
Read the user documentation [here](https://docs.honcho.dev) +## Table of Contents + +- [Project Structure](#project-structure) +- [Usage](#usage) + - [API](#api) + - [Docker](#docker) + - [Manually](#manually) + - [Deploying on Fly.io](#deploy-on-fly) + - [Client SDK](#client-sdk) + - [Use Locally](#use-locally) +- [Contributing](#contributing) +- [License](#license) + ## Project Structure The Honcho repo is a monorepo containing the server/API that manages database diff --git a/api/fly.toml index 2012518..8c9f343 100644 --- a/api/fly.toml +++ b/api/fly.toml @@ -41,4 +41,4 @@ kill_timeout = "5s" cpu_kind = "shared" cpus = 1 memory_mb = 512 - processes = ["api"] + processes = ["api", "deriver"] diff --git a/api/src/harvester.py index 7be78bc..6785f29 100644 --- a/api/src/harvester.py +++ b/api/src/harvester.py @@ -3,6 +3,7 @@ import uuid from typing import List +import sentry_sdk from dotenv import load_dotenv from langchain_core.output_parsers import NumberedListOutputParser from langchain_core.prompts import ( @@ -20,6 +21,14 @@ load_dotenv() +SENTRY_ENABLED = os.getenv("SENTRY_ENABLED", "False").lower() == "true" +if SENTRY_ENABLED: + sentry_sdk.init( + dsn=os.getenv("SENTRY_DSN"), + enable_tracing=True, + ) + + SUPABASE_ID = os.getenv("SUPABASE_ID") SUPABASE_API_KEY = os.getenv("SUPABASE_API_KEY") diff --git a/api/src/main.py index 45f9cfd..d69d7f2 100644 --- a/api/src/main.py +++ b/api/src/main.py @@ -42,17 +42,18 @@ from slowapi.util import get_remote_address from starlette.exceptions import HTTPException as StarletteHTTPException -from .db import engine, scaffold_db from src.routers import ( apps, - users, - sessions, - messages, - metamessages, collections, documents, + messages, + metamessages, + sessions, + users, ) +from .db import engine, scaffold_db + # Otel Setup DEBUG_LOG_OTEL_TO_PROVIDER = ( @@ -171,7 +172,7 @@ def otel_logging_init(): otel_trace_init() otel_logging_init() - 
SQLAlchemyInstrumentor().instrument(engine=engine) + SQLAlchemyInstrumentor().instrument(engine=engine.sync_engine) # Sentry Setup diff --git a/example/cli/main.py b/example/cli/main.py index 56d048d..2e59878 100644 --- a/example/cli/main.py +++ b/example/cli/main.py @@ -7,7 +7,7 @@ from langchain_community.chat_models.fake import FakeListChatModel from honcho import Honcho -from honcho.ext.langchain import langchain_message_converter +from honcho.ext.langchain import _messages_to_langchain app_name = str(uuid4()) @@ -28,16 +28,6 @@ session = user.create_session() -# def langchain_message_converter(messages: List): -# new_messages = [] -# for message in messages: -# if message.is_user: -# new_messages.append(HumanMessage(content=message.content)) -# else: -# new_messages.append(AIMessage(content=message.content)) -# return new_messages - - def chat(): while True: user_input = input("User: ") @@ -46,7 +36,7 @@ def chat(): break user_message = HumanMessage(content=user_input) history = list(session.get_messages_generator()) - langchain_history = langchain_message_converter(history) + langchain_history = _messages_to_langchain(history) prompt = ChatPromptTemplate.from_messages( [system, *langchain_history, user_message] ) diff --git a/example/discord/honcho-dspy-personas/bot.py b/example/discord/honcho-dspy-personas/bot.py index ea91ec7..2e0ebfd 100644 --- a/example/discord/honcho-dspy-personas/bot.py +++ b/example/discord/honcho-dspy-personas/bot.py @@ -2,7 +2,7 @@ from uuid import uuid1 import discord from honcho import Honcho -from honcho.ext.langchain import langchain_message_converter +from honcho.ext.langchain import _messages_to_langchain from graph import chat from dspy import Example @@ -61,7 +61,7 @@ async def on_message(message): session = user.create_session(location_id) history = list(session.get_messages_generator())[:5] - chat_history = langchain_message_converter(history) + chat_history = _messages_to_langchain(history) inp = message.content 
user_message = session.create_message(is_user=True, content=inp) diff --git a/example/discord/honcho-fact-memory/bot.py b/example/discord/honcho-fact-memory/bot.py index 910afc6..aba5099 100644 --- a/example/discord/honcho-fact-memory/bot.py +++ b/example/discord/honcho-fact-memory/bot.py @@ -2,7 +2,7 @@ from uuid import uuid1 import discord from honcho import Honcho -from honcho.ext.langchain import langchain_message_converter +from honcho.ext.langchain import _messages_to_langchain from chain import LMChain @@ -59,7 +59,7 @@ async def on_message(message): session = user.create_session(location_id) history = list(session.get_messages_generator()) - chat_history = langchain_message_converter(history) + chat_history = _messages_to_langchain(history) inp = message.content user_message = session.create_message(is_user=True, content=inp) diff --git a/example/discord/simple-roast-bot/main.py b/example/discord/simple-roast-bot/main.py index 2a7a4b2..f6e18a0 100644 --- a/example/discord/simple-roast-bot/main.py +++ b/example/discord/simple-roast-bot/main.py @@ -11,7 +11,7 @@ from langchain_core.messages import AIMessage, HumanMessage from honcho import Honcho -from honcho.ext.langchain import langchain_message_converter +from honcho.ext.langchain import _messages_to_langchain load_dotenv() @@ -66,7 +66,7 @@ async def on_message(message): session = user.create_session(location_id) history = list(session.get_messages_generator()) - chat_history = langchain_message_converter(history) + chat_history = _messages_to_langchain(history) inp = message.content session.create_message(is_user=True, content=inp) diff --git a/sdk/honcho/ext/langchain.py b/sdk/honcho/ext/langchain.py index 844387e..a27a9fb 100644 --- a/sdk/honcho/ext/langchain.py +++ b/sdk/honcho/ext/langchain.py @@ -27,7 +27,15 @@ def wrapper(*args, **kwargs): @requires_langchain def _messages_to_langchain(messages: List[Message]): - """Converts Honcho messages to Langchain messages""" + """Converts Honcho messages to 
Langchain messages + + Args: + messages (List[Message]): The list of messages to convert + + Returns: + List: The list of converted LangChain messages + + """ from langchain_core.messages import AIMessage, HumanMessage # type: ignore new_messages = [] @@ -40,16 +48,31 @@ def _messages_to_langchain(messages: List[Message]): @requires_langchain -def _langchain_to_messages(messages, session: Union[Session, AsyncSession]): - """Converts Langchain messages to Langchain messages""" +def _langchain_to_messages( + messages, session: Union[Session, AsyncSession] +) -> List[Message]: + """Converts Langchain messages to Honcho messages and adds to appropriate session + + Args: + messages: The LangChain messages to convert + session: The session to add the messages to + + Returns: + List[Message]: The list of converted messages + + """ from langchain_core.messages import HumanMessage # type: ignore + new_messages = [] for message in messages: if isinstance(message, HumanMessage): - session.create_message( + honcho_message = session.create_message( is_user=True, content=message.content, metadata=message.metadata ) + new_messages.append(honcho_message) else: - session.create_message( + honcho_message = session.create_message( is_user=False, content=message.content, metadata=message.metadata ) + new_messages.append(honcho_message) + return new_messages