From c1c044021c8185a1e40958ff0d304420bb614930 Mon Sep 17 00:00:00 2001
From: Simon Farshid
Date: Tue, 10 Sep 2024 10:25:06 -0700
Subject: [PATCH] refactor: rename to useLangGraphRuntime (#811)

---
 .changeset/thirty-cameras-hope.md             |  5 +++++
 apps/docs/content/docs/runtimes/langgraph.mdx |  8 +++----
 .../with-langgraph/app/MyRuntimeProvider.tsx  | 10 +++------
 examples/with-langgraph/lib/chatApi.ts        | 22 +++++++++----------
 packages/react-langgraph/src/index.ts         |  8 ++++++-
 .../src/useLangGraphRuntime.ts                |  2 +-
 6 files changed, 30 insertions(+), 25 deletions(-)
 create mode 100644 .changeset/thirty-cameras-hope.md

diff --git a/.changeset/thirty-cameras-hope.md b/.changeset/thirty-cameras-hope.md
new file mode 100644
index 000000000..f1829846b
--- /dev/null
+++ b/.changeset/thirty-cameras-hope.md
@@ -0,0 +1,5 @@
+---
+"@assistant-ui/react-langgraph": patch
+---
+
+refactor: rename to useLangGraphRuntime
diff --git a/apps/docs/content/docs/runtimes/langgraph.mdx b/apps/docs/content/docs/runtimes/langgraph.mdx
index 0382d403e..99306783a 100644
--- a/apps/docs/content/docs/runtimes/langgraph.mdx
+++ b/apps/docs/content/docs/runtimes/langgraph.mdx
@@ -1,10 +1,10 @@
 ---
-title: LangChain LangGraph
+title: LangGraph Cloud
 ---
 
 ## Overview
 
-Integration with LangChain's LangGraph server.
+Integration with LangChain's LangGraph Cloud server.
 
 ## Requirements
 
@@ -92,7 +92,7 @@ export const sendMessage = async (params: {
 
 import { useRef } from "react";
 import { AssistantRuntimeProvider } from "@assistant-ui/react";
-import { useLangChainLangGraphRuntime } from "@assistant-ui/react-langgraph";
+import { useLangGraphRuntime } from "@assistant-ui/react-langgraph";
 import { createThread, sendMessage } from "@/lib/chatApi";
 
 export function MyRuntimeProvider({
@@ -101,7 +101,7 @@
   children: React.ReactNode;
 }>) {
   const threadIdRef = useRef();
-  const assistant = useLangChainLangGraphRuntime({
+  const assistant = useLangGraphRuntime({
     threadId: threadIdRef.current,
     stream: async (message) => {
       if (!threadIdRef.current) {
diff --git a/examples/with-langgraph/app/MyRuntimeProvider.tsx b/examples/with-langgraph/app/MyRuntimeProvider.tsx
index d9812cfb5..da0a5ee5d 100644
--- a/examples/with-langgraph/app/MyRuntimeProvider.tsx
+++ b/examples/with-langgraph/app/MyRuntimeProvider.tsx
@@ -1,8 +1,8 @@
 "use client";
 
-import { AssistantRuntimeProvider } from "@assistant-ui/react";
-import { useLangChainLangGraphRuntime } from "@assistant-ui/react-langgraph";
 import { useRef } from "react";
+import { AssistantRuntimeProvider } from "@assistant-ui/react";
+import { useLangGraphRuntime } from "@assistant-ui/react-langgraph";
 import { createThread, sendMessage } from "@/lib/chatApi";
 
 export function MyRuntimeProvider({
@@ -11,7 +11,7 @@
   children: React.ReactNode;
 }>) {
   const threadIdRef = useRef();
-  const runtime = useLangChainLangGraphRuntime({
+  const runtime = useLangGraphRuntime({
     threadId: threadIdRef.current,
     stream: async (message) => {
       if (!threadIdRef.current) {
@@ -21,11 +21,7 @@ export function MyRuntimeProvider({
       const threadId = threadIdRef.current;
       return sendMessage({
         threadId,
-        assistantId: process.env["NEXT_PUBLIC_LANGGRAPH_GRAPH_ID"] as string,
         message,
-        model: "openai",
-        userId: "",
-        systemInstructions: "",
       });
     },
   });
diff --git a/examples/with-langgraph/lib/chatApi.ts b/examples/with-langgraph/lib/chatApi.ts
index ec9b2937c..020a752d4 100644
--- a/examples/with-langgraph/lib/chatApi.ts
+++ b/examples/with-langgraph/lib/chatApi.ts
@@ -41,11 +41,7 @@ export const updateState = async (
 
 export const sendMessage = async (params: {
   threadId: string;
-  assistantId: string;
   message: LangChainMessage | null;
-  model: string;
-  userId: string;
-  systemInstructions: string;
 }) => {
   const client = createClient();
 
@@ -53,19 +49,21 @@ export const sendMessage = async (params: {
   if (params.message !== null) {
     input = {
       messages: [params.message],
-      userId: params.userId,
     };
   }
 
   const config = {
     configurable: {
-      model_name: params.model,
-      system_instructions: params.systemInstructions,
+      model_name: "openai",
     },
   };
-  return client.runs.stream(params.threadId, params.assistantId, {
-    input,
-    config,
-    streamMode: "messages",
-  });
+  return client.runs.stream(
+    params.threadId,
+    process.env["NEXT_PUBLIC_LANGGRAPH_GRAPH_ID"] as string,
+    {
+      input,
+      config,
+      streamMode: "messages",
+    },
+  );
 };
diff --git a/packages/react-langgraph/src/index.ts b/packages/react-langgraph/src/index.ts
index d0997108b..393f3d210 100644
--- a/packages/react-langgraph/src/index.ts
+++ b/packages/react-langgraph/src/index.ts
@@ -1,4 +1,5 @@
-export { useLangChainLangGraphRuntime } from "./useLangGraphRuntime";
+export { useLangGraphRuntime } from "./useLangGraphRuntime";
+
 export { useLangGraphMessages } from "./useLangGraphMessages";
 export { convertLangchainMessages } from "./convertLangchainMessages";
 export type {
@@ -7,3 +8,8 @@ export type {
   LangChainToolCall,
   LangChainToolCallChunk,
 } from "./types";
+
+/**
+ * @deprecated Use `useLangGraphRuntime` instead. This will be removed in 0.1.0.
+ */
+export { useLangGraphRuntime as useLangChainLangGraphRuntime } from "./useLangGraphRuntime";
diff --git a/packages/react-langgraph/src/useLangGraphRuntime.ts b/packages/react-langgraph/src/useLangGraphRuntime.ts
index b162c2e38..5dc51a058 100644
--- a/packages/react-langgraph/src/useLangGraphRuntime.ts
+++ b/packages/react-langgraph/src/useLangGraphRuntime.ts
@@ -8,7 +8,7 @@ import { convertLangchainMessages } from "./convertLangchainMessages";
 import { useLangGraphMessages } from "./useLangGraphMessages";
 import { ExternalStoreRuntime } from "@assistant-ui/react";
 
-export const useLangChainLangGraphRuntime = ({
+export const useLangGraphRuntime = ({
   threadId,
   stream,
 }: {
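
For reference, a minimal sketch of the renamed hook in use, adapted from the example provider touched by this patch. The `AssistantRuntimeProvider` wrapper and the `thread_id` shape returned by `createThread` follow the assistant-ui LangGraph docs and are assumptions here, not part of the diff itself.

"use client";

import { useRef } from "react";
import { AssistantRuntimeProvider } from "@assistant-ui/react";
// Previously: import { useLangChainLangGraphRuntime } from "@assistant-ui/react-langgraph";
// The old name is still re-exported by this patch, but marked @deprecated.
import { useLangGraphRuntime } from "@assistant-ui/react-langgraph";
import { createThread, sendMessage } from "@/lib/chatApi";

export function MyRuntimeProvider({
  children,
}: Readonly<{ children: React.ReactNode }>) {
  // The LangGraph thread is created lazily on the first streamed message.
  const threadIdRef = useRef<string | undefined>(undefined);

  const runtime = useLangGraphRuntime({
    threadId: threadIdRef.current,
    stream: async (message) => {
      let threadId = threadIdRef.current;
      if (!threadId) {
        // Assumed helper shape: createThread() resolves to { thread_id }.
        const { thread_id } = await createThread();
        threadIdRef.current = thread_id;
        threadId = thread_id;
      }
      // assistantId/model/userId/systemInstructions no longer appear here;
      // the graph id is resolved inside sendMessage (see chatApi.ts above).
      return sendMessage({ threadId, message });
    },
  });

  return (
    <AssistantRuntimeProvider runtime={runtime}>
      {children}
    </AssistantRuntimeProvider>
  );
}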