feat(ai-sdk): attachments support (#764)
Yonom authored Sep 7, 2024
1 parent 9e00772 commit 7809584
Showing 6 changed files with 62 additions and 26 deletions.
2 changes: 2 additions & 0 deletions .changeset/olive-olives-learn.md
@@ -1,5 +1,7 @@
 ---
 "@assistant-ui/react": patch
+"@assistant-ui/react-playground": patch
+"@assistant-ui/react-ai-sdk": patch
 ---

 feat: add composer attachments state
5 changes: 5 additions & 0 deletions .changeset/purple-knives-grow.md
@@ -0,0 +1,5 @@
+---
+"@assistant-ui/react-ai-sdk": patch
+---
+
+feat: allow image content types
5 changes: 5 additions & 0 deletions .changeset/yellow-cameras-count.md
@@ -0,0 +1,5 @@
+---
+"@assistant-ui/react-ai-sdk": patch
+---
+
+feat: AI SDK attachments support
11 changes: 2 additions & 9 deletions packages/react-ai-sdk/src/ui/use-assistant/useVercelUseAssistantRuntime.tsx
@@ -3,6 +3,7 @@ import { useExternalStoreRuntime } from "@assistant-ui/react";
 import { useCachedChunkedMessages } from "../utils/useCachedChunkedMessages";
 import { convertMessage } from "../utils/convertMessage";
 import { useInputSync } from "../utils/useInputSync";
+import { toCreateMessage } from "../utils/toCreateMessage";

 export const useVercelUseAssistantRuntime = (
   assistantHelpers: ReturnType<typeof useAssistant>,
@@ -13,15 +14,7 @@ export const useVercelUseAssistantRuntime = (
     messages,
     onCancel: async () => assistantHelpers.stop(),
     onNew: async (message) => {
-      if (message.content.length !== 1 || message.content[0]?.type !== "text")
-        throw new Error(
-          "VercelUseAssistantRuntime only supports text content.",
-        );
-
-      await assistantHelpers.append({
-        role: message.role,
-        content: message.content[0].text,
-      });
+      await assistantHelpers.append(await toCreateMessage(message));
     },
     onNewThread: () => {
       assistantHelpers.messages = [];
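For context, here is a minimal sketch of how the updated hook is typically consumed (not part of this commit). It assumes the standard `AssistantRuntimeProvider` from `@assistant-ui/react`, the AI SDK's `useAssistant` hook from `ai/react`, and a placeholder `/api/assistant` endpoint:

```tsx
import type { ReactNode } from "react";
import { useAssistant } from "ai/react";
import { AssistantRuntimeProvider } from "@assistant-ui/react";
import { useVercelUseAssistantRuntime } from "@assistant-ui/react-ai-sdk";

export function AssistantProvider({ children }: { children: ReactNode }) {
  // "/api/assistant" is a placeholder; point this at your own useAssistant route.
  const assistantHelpers = useAssistant({ api: "/api/assistant" });

  // After this change, onNew forwards the full composer message (text parts plus
  // attachments) to assistantHelpers.append() via toCreateMessage, instead of
  // rejecting anything that is not a single text part.
  const runtime = useVercelUseAssistantRuntime(assistantHelpers);

  return (
    <AssistantRuntimeProvider runtime={runtime}>
      {children}
    </AssistantRuntimeProvider>
  );
}
```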
21 changes: 4 additions & 17 deletions packages/react-ai-sdk/src/ui/use-chat/useVercelUseChatRuntime.tsx
@@ -4,42 +4,29 @@ import { convertMessage } from "../utils/convertMessage";
 import { useExternalStoreRuntime } from "@assistant-ui/react";
 import { useInputSync } from "../utils/useInputSync";
 import { sliceMessagesUntil } from "../utils/sliceMessagesUntil";
+import { toCreateMessage } from "../utils/toCreateMessage";

 export const useVercelUseChatRuntime = (
   chatHelpers: ReturnType<typeof useChat>,
 ) => {
   const messages = useCachedChunkedMessages(chatHelpers.messages);

   const runtime = useExternalStoreRuntime({
     isRunning: chatHelpers.isLoading,
     messages,
     setMessages: (messages) => chatHelpers.setMessages(messages.flat()),
     onCancel: async () => chatHelpers.stop(),
     onNew: async (message) => {
-      if (message.content.length !== 1 || message.content[0]?.type !== "text")
-        throw new Error(
-          "Only text content is supported by VercelUseChatRuntime. Use the Edge runtime for image support.",
-        );
-      await chatHelpers.append({
-        role: message.role,
-        content: message.content[0].text,
-      });
+      await chatHelpers.append(await toCreateMessage(message));
     },
     onEdit: async (message) => {
-      if (message.content.length !== 1 || message.content[0]?.type !== "text")
-        throw new Error(
-          "Only text content is supported by VercelUseChatRuntime. Use the Edge runtime for image support.",
-        );
-
       const newMessages = sliceMessagesUntil(
         chatHelpers.messages,
         message.parentId,
       );
       chatHelpers.setMessages(newMessages);

-      await chatHelpers.append({
-        role: message.role,
-        content: message.content[0].text,
-      });
+      await chatHelpers.append(await toCreateMessage(message));
     },
     onReload: async (parentId: string | null) => {
       const newMessages = sliceMessagesUntil(chatHelpers.messages, parentId);
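The `useChat` variant is wired the same way. A minimal sketch (again not part of this commit), assuming an AI SDK route at the placeholder path `/api/chat`:

```tsx
import type { ReactNode } from "react";
import { useChat } from "ai/react";
import { AssistantRuntimeProvider } from "@assistant-ui/react";
import { useVercelUseChatRuntime } from "@assistant-ui/react-ai-sdk";

export function ChatProvider({ children }: { children: ReactNode }) {
  // "/api/chat" is a placeholder; use whatever route your useChat setup targets.
  const chatHelpers = useChat({ api: "/api/chat" });

  // New messages and edits are now converted by toCreateMessage, so composer
  // attachments reach chatHelpers.append() as experimental_attachments rather
  // than triggering the old "text content only" error.
  const runtime = useVercelUseChatRuntime(chatHelpers);

  return (
    <AssistantRuntimeProvider runtime={runtime}>
      {children}
    </AssistantRuntimeProvider>
  );
}
```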
44 changes: 44 additions & 0 deletions packages/react-ai-sdk/src/ui/utils/toCreateMessage.ts
@@ -0,0 +1,44 @@
+import { AppendMessage } from "@assistant-ui/react";
+import { CreateMessage } from "ai";
+
+export const toCreateMessage = async (
+  message: AppendMessage,
+): Promise<CreateMessage> => {
+  const content = message.content
+    .filter((part) => part.type === "text")
+    .map((t) => t.text)
+    .join("\n\n");
+
+  const images = message.content
+    .filter((part) => part.type === "image")
+    .map((part) => ({ url: part.image }));
+
+  return {
+    role: message.role,
+    content,
+    experimental_attachments: [
+      ...images,
+      ...(await Promise.all(
+        message.attachments.map(async (m) => {
+          if (m.file == null)
+            throw new Error("Attachment did not contain a file");
+          return {
+            contentType: m.file.type,
+            name: m.file.name,
+            url: await getFileDataURL(m.file),
+          };
+        }),
+      )),
+    ],
+  };
+};
+
+const getFileDataURL = (file: File) =>
+  new Promise<string>((resolve, reject) => {
+    const reader = new FileReader();
+
+    reader.onload = () => resolve(reader.result as string);
+    reader.onerror = (error) => reject(error);
+
+    reader.readAsDataURL(file);
+  });
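To illustrate the conversion, this is the approximate shape `toCreateMessage` produces for a user message with one text part and one attached PNG. The values are example data only, and the snippet assumes an AI SDK version in which `CreateMessage` carries the optional `experimental_attachments` field (which this commit relies on):

```ts
import type { CreateMessage } from "ai";

// Example data for illustration; the data URL is truncated. In real output, url
// holds the full base64 data URL produced by FileReader.readAsDataURL.
const converted: CreateMessage = {
  role: "user",
  content: "What is in this picture?", // text parts joined with "\n\n"
  experimental_attachments: [
    {
      contentType: "image/png",                   // from File.type
      name: "photo.png",                          // from File.name
      url: "data:image/png;base64,iVBORw0KGg...", // from getFileDataURL(file)
    },
  ],
};
```

Image parts from the composer are included the same way, but only as `{ url }` entries without a name or content type.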
