Skip to content

Commit

Permalink
feat: integrate llamaindex chat-ui (#399)
Browse files Browse the repository at this point in the history
---------
Co-authored-by: Marcus Schiesser <[email protected]>
  • Loading branch information
thucpn authored Nov 1, 2024
1 parent 0251070 commit 78ccde7
Show file tree
Hide file tree
Showing 34 changed files with 290 additions and 2,019 deletions.
5 changes: 5 additions & 0 deletions .changeset/nice-garlics-repeat.md
Original file line number Diff line number Diff line change
@@ -0,0 +1,5 @@
---
"create-llama": patch
---

feat: use llamaindex chat-ui for nextjs frontend
53 changes: 11 additions & 42 deletions templates/types/streaming/nextjs/app/components/chat-section.tsx
Original file line number Diff line number Diff line change
@@ -1,57 +1,26 @@
"use client";

import { ChatSection as ChatSectionUI } from "@llamaindex/chat-ui";
import "@llamaindex/chat-ui/styles/code.css";
import "@llamaindex/chat-ui/styles/katex.css";
import { useChat } from "ai/react";
import { useState } from "react";
import { ChatInput, ChatMessages } from "./ui/chat";
import CustomChatInput from "./ui/chat/chat-input";
import CustomChatMessages from "./ui/chat/chat-messages";
import { useClientConfig } from "./ui/chat/hooks/use-config";

export default function ChatSection() {
const { backend } = useClientConfig();
const [requestData, setRequestData] = useState<any>();
const {
messages,
input,
isLoading,
handleSubmit,
handleInputChange,
reload,
stop,
append,
setInput,
} = useChat({
body: { data: requestData },
const handler = useChat({
api: `${backend}/api/chat`,
headers: {
"Content-Type": "application/json", // using JSON because of vercel/ai 2.2.26
},
onError: (error: unknown) => {
if (!(error instanceof Error)) throw error;
const message = JSON.parse(error.message);
alert(message.detail);
alert(JSON.parse(error.message).detail);
},
sendExtraMessageFields: true,
});

return (
<div className="space-y-4 w-full h-full flex flex-col">
<ChatMessages
messages={messages}
isLoading={isLoading}
reload={reload}
stop={stop}
append={append}
/>
<ChatInput
input={input}
handleSubmit={handleSubmit}
handleInputChange={handleInputChange}
isLoading={isLoading}
messages={messages}
append={append}
setInput={setInput}
requestParams={{ params: requestData }}
setRequestData={setRequestData}
/>
</div>
<ChatSectionUI handler={handler} className="w-full h-full">
<CustomChatMessages />
<CustomChatInput />
</ChatSectionUI>
);
}

This file was deleted.

This file was deleted.

Original file line number Diff line number Diff line change
@@ -1,8 +1,10 @@
import { useChatMessage } from "@llamaindex/chat-ui";
import { User2 } from "lucide-react";
import Image from "next/image";

export default function ChatAvatar({ role }: { role: string }) {
if (role === "user") {
export function ChatMessageAvatar() {
const { message } = useChatMessage();
if (message.role === "user") {
return (
<div className="flex h-8 w-8 shrink-0 select-none items-center justify-center rounded-md border bg-background shadow">
<User2 className="h-4 w-4" />
Expand Down
167 changes: 52 additions & 115 deletions templates/types/streaming/nextjs/app/components/ui/chat/chat-input.tsx
Original file line number Diff line number Diff line change
@@ -1,34 +1,13 @@
import { JSONValue } from "ai";
import React from "react";
import { DocumentFile } from ".";
import { Button } from "../button";
import { DocumentPreview } from "../document-preview";
import FileUploader from "../file-uploader";
import { Textarea } from "../textarea";
import UploadImagePreview from "../upload-image-preview";
import { ChatHandler } from "./chat.interface";
import { useFile } from "./hooks/use-file";
import { LlamaCloudSelector } from "./widgets/LlamaCloudSelector";
"use client";

const ALLOWED_EXTENSIONS = ["png", "jpg", "jpeg", "csv", "pdf", "txt", "docx"];
import { ChatInput, useChatUI, useFile } from "@llamaindex/chat-ui";
import { DocumentPreview, ImagePreview } from "@llamaindex/chat-ui/widgets";
import { LlamaCloudSelector } from "./custom/llama-cloud-selector";
import { useClientConfig } from "./hooks/use-config";

export default function ChatInput(
props: Pick<
ChatHandler,
| "isLoading"
| "input"
| "onFileUpload"
| "onFileError"
| "handleSubmit"
| "handleInputChange"
| "messages"
| "setInput"
| "append"
> & {
requestParams?: any;
setRequestData?: React.Dispatch<any>;
},
) {
export default function CustomChatInput() {
const { requestData, isLoading, input } = useChatUI();
const { backend } = useClientConfig();
const {
imageUrl,
setImageUrl,
Expand All @@ -37,107 +16,65 @@ export default function ChatInput(
removeDoc,
reset,
getAnnotations,
} = useFile();

// default submit function does not handle including annotations in the message
// so we need to use append function to submit new message with annotations
const handleSubmitWithAnnotations = (
e: React.FormEvent<HTMLFormElement>,
annotations: JSONValue[] | undefined,
) => {
e.preventDefault();
props.append!({
content: props.input,
role: "user",
createdAt: new Date(),
annotations,
});
props.setInput!("");
};

const onSubmit = (e: React.FormEvent<HTMLFormElement>) => {
e.preventDefault();
const annotations = getAnnotations();
if (annotations.length) {
handleSubmitWithAnnotations(e, annotations);
return reset();
}
props.handleSubmit(e);
};
} = useFile({ uploadAPI: `${backend}/api/chat/upload` });

/**
* Handles file uploads. Overwrite to hook into the file upload behavior.
* @param file The file to upload
*/
const handleUploadFile = async (file: File) => {
// There's already an image uploaded, only allow one image at a time
if (imageUrl) {
alert("You can only upload one image at a time.");
return;
}

try {
await uploadFile(file, props.requestParams);
props.onFileUpload?.(file);
// Upload the file and send with it the current request data
await uploadFile(file, requestData);
} catch (error: any) {
const onFileUploadError = props.onFileError || window.alert;
onFileUploadError(error.message);
// Show error message if upload fails
alert(error.message);
}
};

const handleKeyDown = (e: React.KeyboardEvent<HTMLTextAreaElement>) => {
if (e.key === "Enter" && !e.shiftKey) {
e.preventDefault();
onSubmit(e as unknown as React.FormEvent<HTMLFormElement>);
}
};
// Get references to the upload files in message annotations format, see https://github.com/run-llama/chat-ui/blob/main/packages/chat-ui/src/hook/use-file.tsx#L56
const annotations = getAnnotations();

return (
<form
onSubmit={onSubmit}
className="rounded-xl bg-white p-4 shadow-xl space-y-4 shrink-0"
<ChatInput
className="shadow-xl rounded-xl"
resetUploadedFiles={reset}
annotations={annotations}
>
{imageUrl && (
<UploadImagePreview url={imageUrl} onRemove={() => setImageUrl(null)} />
)}
{files.length > 0 && (
<div className="flex gap-4 w-full overflow-auto py-2">
{files.map((file: DocumentFile) => (
<DocumentPreview
key={file.id}
file={file}
onRemove={() => removeDoc(file)}
/>
))}
</div>
)}
<div className="flex w-full items-start justify-between gap-4 ">
<Textarea
id="chat-input"
autoFocus
name="message"
placeholder="Type a message"
className="flex-1 min-h-0 h-[40px]"
value={props.input}
onChange={props.handleInputChange}
onKeyDown={handleKeyDown}
/>
<FileUploader
onFileUpload={handleUploadFile}
onFileError={props.onFileError}
config={{
allowedExtensions: ALLOWED_EXTENSIONS,
disabled: props.isLoading,
multiple: true,
}}
/>
{process.env.NEXT_PUBLIC_USE_LLAMACLOUD === "true" &&
props.setRequestData && (
<LlamaCloudSelector setRequestData={props.setRequestData} />
)}
<Button
type="submit"
<div>
{/* Image preview section */}
{imageUrl && (
<ImagePreview url={imageUrl} onRemove={() => setImageUrl(null)} />
)}
{/* Document previews section */}
{files.length > 0 && (
<div className="flex gap-4 w-full overflow-auto py-2">
{files.map((file) => (
<DocumentPreview
key={file.id}
file={file}
onRemove={() => removeDoc(file)}
/>
))}
</div>
)}
</div>
<ChatInput.Form>
<ChatInput.Field />
<ChatInput.Upload onUpload={handleUploadFile} />
<LlamaCloudSelector />
<ChatInput.Submit
disabled={
props.isLoading || (!props.input.trim() && files.length === 0)
isLoading || (!input.trim() && files.length === 0 && !imageUrl)
}
>
Send message
</Button>
</div>
</form>
/>
</ChatInput.Form>
</ChatInput>
);
}
Original file line number Diff line number Diff line change
@@ -0,0 +1,30 @@
import {
  ChatMessage,
  ContentPosition,
  getSourceAnnotationData,
  useChatMessage,
} from "@llamaindex/chat-ui";
import { Markdown } from "./custom/markdown";
import { ToolAnnotations } from "./tools/chat-tools";

/**
 * Renders the content of the current chat message via ChatMessage.Content,
 * customizing two positions:
 * - MARKDOWN: replaces the default markdown renderer with our Markdown
 *   component, passing along the first source-annotation entry (if any)
 *   extracted from the message annotations.
 * - AFTER_EVENTS: appends tool-call annotations after the events section.
 */
export function ChatMessageContent() {
  const { message } = useChatMessage();

  // Custom markdown renderer wired to the message's source annotations.
  const markdownSection = {
    position: ContentPosition.MARKDOWN,
    component: (
      <Markdown
        content={message.content}
        sources={getSourceAnnotationData(message.annotations)?.[0]}
      />
    ),
  };

  // Tool annotations rendered after the events block.
  const toolSection = {
    position: ContentPosition.AFTER_EVENTS,
    component: <ToolAnnotations message={message} />,
  };

  return <ChatMessage.Content content={[markdownSection, toolSection]} />;
}
Loading

0 comments on commit 78ccde7

Please sign in to comment.