diff --git a/apps/docs/content/docs/runtimes/langgraph.mdx b/apps/docs/content/docs/runtimes/langgraph.mdx
index 3fb779473..30324f233 100644
--- a/apps/docs/content/docs/runtimes/langgraph.mdx
+++ b/apps/docs/content/docs/runtimes/langgraph.mdx
@@ -81,7 +81,7 @@ export const createThread = async () => {
 
 export const sendMessage = async (params: {
   threadId: string;
-  message: LangChainMessage;
+  messages: LangChainMessage[];
 }) => {
   const client = createClient();
   return client.runs.stream(
@@ -89,7 +89,7 @@ export const sendMessage = async (params: {
     process.env["NEXT_PUBLIC_LANGGRAPH_ASSISTANT_ID"]!,
     {
       input: {
-        messages: [params.message],
+        messages: params.messages,
       },
       streamMode: "messages",
     },
@@ -122,7 +122,7 @@ export function MyRuntimeProvider({
   const threadIdRef = useRef();
   const runtime = useLangGraphRuntime({
     threadId: threadIdRef.current,
-    stream: async (message) => {
+    stream: async (messages) => {
       if (!threadIdRef.current) {
         const { thread_id } = await createThread();
         threadIdRef.current = thread_id;
@@ -130,7 +130,7 @@ export function MyRuntimeProvider({
       const threadId = threadIdRef.current;
       return sendMessage({
         threadId,
-        message,
+        messages,
       });
     },
   });
diff --git a/examples/with-langgraph/app/MyRuntimeProvider.tsx b/examples/with-langgraph/app/MyRuntimeProvider.tsx
index da0a5ee5d..602cb173b 100644
--- a/examples/with-langgraph/app/MyRuntimeProvider.tsx
+++ b/examples/with-langgraph/app/MyRuntimeProvider.tsx
@@ -13,7 +13,7 @@ export function MyRuntimeProvider({
   const threadIdRef = useRef();
   const runtime = useLangGraphRuntime({
     threadId: threadIdRef.current,
-    stream: async (message) => {
+    stream: async (messages) => {
       if (!threadIdRef.current) {
         const { thread_id } = await createThread();
         threadIdRef.current = thread_id;
@@ -21,7 +21,7 @@ export function MyRuntimeProvider({
       const threadId = threadIdRef.current;
       return sendMessage({
         threadId,
-        message,
+        messages,
       });
     },
   });
diff --git a/examples/with-langgraph/lib/chatApi.ts b/examples/with-langgraph/lib/chatApi.ts
index 1e7969559..cb7d9851c 100644
--- a/examples/with-langgraph/lib/chatApi.ts
+++ b/examples/with-langgraph/lib/chatApi.ts
@@ -41,16 +41,13 @@ export const updateState = async (
 
 export const sendMessage = async (params: {
   threadId: string;
-  message: LangChainMessage | null;
+  messages: LangChainMessage[];
 }) => {
   const client = createClient();
 
-  let input: Record | null = null;
-  if (params.message !== null) {
-    input = {
-      messages: [params.message],
-    };
-  }
+  let input: Record | null = {
+    messages: params.messages,
+  };
   const config = {
     configurable: {
       model_name: "openai",