From 67a8ef43066fe81efa9f9e655900a5b0f2c53cda Mon Sep 17 00:00:00 2001
From: Simon Farshid
Date: Thu, 18 Jul 2024 23:02:34 -0700
Subject: [PATCH] docs: add more LLM providers (#534)

---
 apps/docs/content/docs/docs/index.mdx | 145 ++++++++++++++++++++++++--
 1 file changed, 139 insertions(+), 6 deletions(-)

diff --git a/apps/docs/content/docs/docs/index.mdx b/apps/docs/content/docs/docs/index.mdx
index 9560014e3..fa1bacd0e 100644
--- a/apps/docs/content/docs/docs/index.mdx
+++ b/apps/docs/content/docs/docs/index.mdx
@@ -60,27 +60,55 @@ npm install @assistant-ui/react
 
 ### Setup Backend Endpoint
 
-
+Install provider SDK:
 
-```sh tab="OpenAI"
+
+
+```sh title="Terminal" tab="OpenAI"
 npm install @ai-sdk/openai
 ```
 
-```sh tab="Anthropic"
+```sh title="Terminal" tab="Anthropic"
 npm install @ai-sdk/anthropic
 ```
 
-```sh tab="Groq"
+```sh title="Terminal" tab="AWS"
+npm install @ai-sdk/amazon-bedrock
+```
+
+```sh title="Terminal" tab="Azure"
+npm install @ai-sdk/azure
+```
+
+```sh title="Terminal" tab="GCP"
+npm install @ai-sdk/google-vertex
+```
+
+```sh title="Terminal" tab="Groq"
 npm install @ai-sdk/openai
 ```
 
-```sh tab="Fireworks"
+```sh title="Terminal" tab="Fireworks"
 npm install @ai-sdk/openai
 ```
 
+```sh title="Terminal" tab="Cohere"
+npm install @ai-sdk/cohere
+```
+
+```sh title="Terminal" tab="Ollama"
+npm install ollama-ai-provider
+```
+
+```sh title="Terminal" tab="Chrome AI"
+npm install chrome-ai
+```
+
-
+Add an API endpoint:
+
+
 ```ts title="/app/api/chat/route.ts" tab="OpenAI"
 import { openai } from "@ai-sdk/openai";
 import { createEdgeRuntimeAPI } from "@assistant-ui/react/edge";
 
 export const { POST } = createEdgeRuntimeAPI({
   model: openai("gpt-4o"),
 });
 ```
@@ -99,6 +127,33 @@ export const { POST } = createEdgeRuntimeAPI({
 });
 ```
 
+```ts title="/app/api/chat/route.ts" tab="Azure"
+import { azure } from "@ai-sdk/azure";
+import { createEdgeRuntimeAPI } from "@assistant-ui/react/edge";
+
+export const { POST } = createEdgeRuntimeAPI({
+  model: azure("your-deployment-name"),
+});
+```
+
+```ts title="/app/api/chat/route.ts" tab="AWS"
+import { bedrock } from "@ai-sdk/amazon-bedrock";
+import { createEdgeRuntimeAPI } from "@assistant-ui/react/edge";
+
+export const { POST } = createEdgeRuntimeAPI({
+  model: bedrock("anthropic.claude-3-5-sonnet-20240620-v1:0"),
+});
+```
+
+```ts title="/app/api/chat/route.ts" tab="GCP"
+import { vertex } from "@ai-sdk/google-vertex";
+import { createEdgeRuntimeAPI } from "@assistant-ui/react/edge";
+
+export const { POST } = createEdgeRuntimeAPI({
+  model: vertex("gemini-1.5-pro"),
+});
+```
+
 ```ts title="/app/api/chat/route.ts" tab="Groq"
 import { createOpenAI } from "@ai-sdk/openai";
 import { createEdgeRuntimeAPI } from "@assistant-ui/react/edge";
@@ -127,6 +182,84 @@ export const { POST } = createEdgeRuntimeAPI({
 });
 ```
 
+```ts title="/app/api/chat/route.ts" tab="Cohere"
+import { cohere } from "@ai-sdk/cohere";
+import { createEdgeRuntimeAPI } from "@assistant-ui/react/edge";
+
+export const { POST } = createEdgeRuntimeAPI({
+  model: cohere("command-r-plus"),
+});
+```
+
+```ts title="/app/api/chat/route.ts" tab="Ollama"
+import { ollama } from "ollama-ai-provider";
+import { createEdgeRuntimeAPI } from "@assistant-ui/react/edge";
+
+export const { POST } = createEdgeRuntimeAPI({
+  model: ollama("llama3"),
+});
+```
+
+```ts title="/app/api/chat/route.ts" tab="Chrome AI"
+import { chromeai } from "chrome-ai";
+import { createEdgeRuntimeAPI } from "@assistant-ui/react/edge";
+
+export const { POST } = createEdgeRuntimeAPI({
+  model: chromeai(),
+});
+```
+
+
+Define environment variables:
+
+
+```sh title="/.env.local" tab="OpenAI"
tab="OpenAI" +OPENAI_API_KEY="sk-xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx" +``` + +```sh title="/.env.local" tab="Anthropic" +ANTHROPIC_API_KEY="xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx" +``` + +```sh title="/.env.local" tab="AWS" +AWS_ACCESS_KEY_ID="xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx" +AWS_SECRET_ACCESS_KEY="xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx" +AWS_REGION="xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx" +``` + +```sh title="/.env.local" tab="Azure" +AZURE_RESOURCE_NAME="xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx" +AZURE_API_KEY="xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx" +``` + +```sh title="/.env.local" tab="GCP" +GOOGLE_VERTEX_LOCATION="xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx" +GOOGLE_VERTEX_LOCATION="xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx" +GOOGLE_APPLICATION_CREDENTIALS="xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx" +``` + +```sh title="/.env.local" tab="Groq" +GROQ_API_KEY="xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx" +``` + +```sh title="/.env.local" tab="Fireworks" +FIREWORKS_API_KEY="xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx" +``` + +```sh title="/.env.local" tab="Cohere" +COHERE_API_KEY="xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx" +``` + +```sh tab="Ollama" + +``` + +```sh tab="Chrome AI" + +``` + If you aren't using Next.js, you can also deploy this endpoint to Cloudflare Workers, or any other serverless platform.