diff --git a/.env.example b/.env.example
index edfebf3..ea4b29d 100644
--- a/.env.example
+++ b/.env.example
@@ -3,4 +3,5 @@ SALAI_TOKEN="Token of the Account from which you paid MidJourney"
 SERVER_ID="Server id here"
 CHANNEL_ID="Channel in which commands are sent"
 NEXT_PUBLIC_IMAGE_PREFIX="/"
-HUGGINGFACE_TOKEN="huggingface token here https://huggingface.co/docs/hub/security-tokens"
\ No newline at end of file
+HUGGINGFACE_TOKEN="huggingface token here https://huggingface.co/docs/hub/security-tokens"
+OPENAI_API_KEY="openai api key here"
diff --git a/components/Form.tsx b/components/Form.tsx
new file mode 100644
index 0000000..ae41831
--- /dev/null
+++ b/components/Form.tsx
@@ -0,0 +1,183 @@
+'use client'
+import { useRef, useState } from 'react'
+import useSWR from 'swr'
+
+interface ModelType {
+  object: 'engine'
+  id: string
+  ready: boolean
+  owner: string
+  permissions: null
+  created: string
+}
+
+const Form = () => {
+  const messageInput = useRef<HTMLTextAreaElement | null>(null)
+  const [response, setResponse] = useState<string[]>([])
+  const [isLoading, setIsLoading] = useState<boolean>(false)
+  const [models, setModels] = useState<ModelType[]>([])
+  const [currentModel, setCurrentModel] = useState<string>('gpt-4')
+
+  const handleEnter = (
+    e: React.KeyboardEvent<HTMLTextAreaElement> &
+      React.FormEvent<HTMLFormElement>
+  ) => {
+    if (e.key === 'Enter' && isLoading === false) {
+      e.preventDefault()
+      setIsLoading(true)
+      handleSubmit(e)
+    }
+  }
+
+  const handleSubmit = async (e: React.FormEvent<HTMLFormElement>) => {
+    e.preventDefault()
+    const message = messageInput.current?.value
+    if (message !== undefined) {
+      setResponse((prev) => [...prev, message])
+      messageInput.current!.value = ''
+    }
+
+    if (!message) {
+      return
+    }
+
+    const response = await fetch('/api/response', {
+      method: 'POST',
+      headers: {
+        'Content-Type': 'application/json',
+      },
+      body: JSON.stringify({
+        message,
+        currentModel,
+      }),
+    })
+    console.log('Edge function returned.')
+
+    console.log(response)
+
+    if (!response.ok) {
+      throw new Error(response.statusText)
+    }
+
+    const data = response.body
+    if (!data) {
+      return
+    }
+
+    const reader = data.getReader()
+    const decoder = new TextDecoder()
+    let done = false
+
+    // Push a placeholder entry; the streamed chunks below replace it in place.
+    setResponse((prev) => [...prev, message])
+
+    let currentResponse: string[] = []
+    while (!done) {
+      const { value, done: doneReading } = await reader.read()
+      done = doneReading
+      const chunkValue = decoder.decode(value)
+      // currentResponse = [...currentResponse, message, chunkValue];
+      currentResponse = [...currentResponse, chunkValue]
+      setResponse((prev) => [...prev.slice(0, -1), currentResponse.join('')])
+    }
+    // breaks text indent on refresh due to streaming
+    // localStorage.setItem('response', JSON.stringify(currentResponse));
+  }
+
+  const handleReset = () => {
+    localStorage.removeItem('response')
+    setResponse([])
+  }
+
+  useSWR('fetchingResponse', async () => {
+    const storedResponse = localStorage.getItem('response')
+    if (storedResponse) {
+      setResponse(JSON.parse(storedResponse))
+    }
+  })
+
+  const handleModelChange = (e: React.ChangeEvent<HTMLSelectElement>) => {
+    setCurrentModel(e.target.value)
+  }
+
+  return (
+    <div>
+      {/* simplified markup: the original classNames/styling are omitted */}
+      <select value={currentModel} onChange={handleModelChange}>
+        {models.map((model) => (
+          <option key={model.id} value={model.id}>
+            {model.id}
+          </option>
+        ))}
+      </select>
+      <button onClick={handleReset}>Clear history</button>
+      <div>
+        {isLoading
+          ? response.map((item: any, index: number) => {
+              return (
+                <div key={index}>
+                  <p>{item}</p>
+                </div>
+              )
+            })
+          : response
+          ? response.map((item: string, index: number) => {
+              return (
+                <div key={index}>
+                  <p>{item}</p>
+                </div>
+              )
+            })
+          : null}
+      </div>
+      <form onSubmit={handleSubmit}>
+        <textarea name='Message' ref={messageInput} onKeyDown={handleEnter} />
+        <button type='submit'>Send</button>
+      </form>
+    </div>
+  )
+}
+
+export default Form
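
The diff adds an `OPENAI_API_KEY` and a form that POSTs `{ message, currentModel }` to `/api/response` and then reads `response.body` as a plain-text stream, but the route itself is not part of this change. A minimal sketch of a compatible Edge route follows; the file path, prompt shape, and SSE-to-text conversion are assumptions for illustration, not code from this PR.

```ts
// app/api/response/route.ts — hypothetical companion route, not included in this diff.
export const runtime = 'edge'

export async function POST(req: Request): Promise<Response> {
  const { message, currentModel } = await req.json()

  // Forward the prompt to OpenAI's chat completions endpoint with streaming enabled.
  const upstream = await fetch('https://api.openai.com/v1/chat/completions', {
    method: 'POST',
    headers: {
      'Content-Type': 'application/json',
      Authorization: `Bearer ${process.env.OPENAI_API_KEY}`,
    },
    body: JSON.stringify({
      model: currentModel,
      messages: [{ role: 'user', content: message }],
      stream: true,
    }),
  })

  if (!upstream.ok || !upstream.body) {
    return new Response('Upstream error', { status: 500 })
  }

  const encoder = new TextEncoder()
  const decoder = new TextDecoder()
  const reader = upstream.body.getReader()

  // Re-emit the SSE stream ("data: {...}" lines) as plain text chunks,
  // which is the format Form.tsx consumes with response.body.getReader().
  const stream = new ReadableStream<Uint8Array>({
    async start(controller) {
      let buffer = ''
      while (true) {
        const { value, done } = await reader.read()
        if (done) break
        buffer += decoder.decode(value, { stream: true })
        const lines = buffer.split('\n')
        buffer = lines.pop() ?? '' // keep any partial line for the next chunk
        for (const line of lines) {
          const data = line.replace(/^data: /, '').trim()
          if (!data || data === '[DONE]') continue
          try {
            const token = JSON.parse(data).choices?.[0]?.delta?.content ?? ''
            if (token) controller.enqueue(encoder.encode(token))
          } catch {
            // Ignore malformed fragments; complete lines carry the payload.
          }
        }
      }
      controller.close()
    },
  })

  return new Response(stream)
}
```

Because the client simply concatenates whatever chunks arrive, any route that returns the completion as a raw text stream (or even a single non-streamed chunk) works with the reader loop in Form.tsx.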