diff --git a/docs/docs/05-configuration/02-model-providers.md b/docs/docs/05-configuration/02-model-providers.md index 17e8a94c..91784969 100644 --- a/docs/docs/05-configuration/02-model-providers.md +++ b/docs/docs/05-configuration/02-model-providers.md @@ -36,6 +36,7 @@ If this model provider hasn't been configured, then the API would return somethi ``` To configure a model provider using the API, a `POST` request can be made to `/api/model-providers/azure-openai-model-provider/configure` with each required configuration parameter set in the body: + ```json { "OBOT_AZURE_OPENAI_MODEL_PROVIDER_ENDPOINT": "...", @@ -107,3 +108,10 @@ Voyage is Anthropic's recommended text-embedding provider. The Voyage model prov The Ollama model provider requires the configuration parameter `OBOT_OLLAMA_MODEL_PROVIDER_HOST`. This host must point to a running instance of Ollama. For your reference, the default host and port for Ollama is `127.0.0.1:11434`. Obot doesn't set this by default. To set up and run an instance of Ollama, refer to the [Ollama GitHub readme](https://github.com/ollama/ollama/blob/main/README.md). 
+ +## vLLM + +The vLLM model provider requires the following configuration parameters: + +- `OBOT_VLLM_MODEL_PROVIDER_ENDPOINT`: The endpoint to use for your vLLM server (e.g., `your-vllm-server.com` or `https://your-vllm-server.com`) +- `OBOT_VLLM_MODEL_PROVIDER_API_KEY`: The API key for authentication with your vLLM server diff --git a/ui/admin/app/components/model-providers/ModelProviderForm.tsx b/ui/admin/app/components/model-providers/ModelProviderForm.tsx index 6de49f57..217a2fae 100644 --- a/ui/admin/app/components/model-providers/ModelProviderForm.tsx +++ b/ui/admin/app/components/model-providers/ModelProviderForm.tsx @@ -61,6 +61,7 @@ const translateUserFriendlyLabel = (label: string) => { "OBOT_OLLAMA_MODEL_PROVIDER", "OBOT_VOYAGE_MODEL_PROVIDER", "OBOT_GROQ_MODEL_PROVIDER", + "OBOT_VLLM_MODEL_PROVIDER", "OBOT_ANTHROPIC_BEDROCK_MODEL_PROVIDER", ]; diff --git a/ui/admin/app/components/model-providers/constants.ts b/ui/admin/app/components/model-providers/constants.ts index 00bc002c..1b86f69a 100644 --- a/ui/admin/app/components/model-providers/constants.ts +++ b/ui/admin/app/components/model-providers/constants.ts @@ -1,6 +1,7 @@ export const CommonModelProviderIds = { OLLAMA: "ollama-model-provider", GROQ: "groq-model-provider", + VLLM: "vllm-model-provider", VOYAGE: "voyage-model-provider", ANTHROPIC: "anthropic-model-provider", OPENAI: "openai-model-provider", @@ -12,6 +13,7 @@ export const ModelProviderLinks = { [CommonModelProviderIds.VOYAGE]: "https://www.voyageai.com/", [CommonModelProviderIds.OLLAMA]: "https://ollama.com/", [CommonModelProviderIds.GROQ]: "https://groq.com/", + [CommonModelProviderIds.VLLM]: "https://docs.vllm.ai/", [CommonModelProviderIds.AZURE_OPENAI]: "https://azure.microsoft.com/en-us/explore/", [CommonModelProviderIds.ANTHROPIC]: "https://www.anthropic.com", @@ -42,6 +44,11 @@ export const ModelProviderRequiredTooltips: { "Api Key": "Groq API Key. 
Can be created and fetched from https://console.groq.com/keys", }, + [CommonModelProviderIds.VLLM]: { + Endpoint: + "Endpoint for the vLLM OpenAI service (eg. http://localhost:8000)", + "Api Key": "VLLM API Key set when starting the vLLM server", + }, [CommonModelProviderIds.AZURE_OPENAI]: { Endpoint: "Endpoint for the Azure OpenAI service (e.g. https://..api.cognitive.microsoft.com/)", @@ -89,6 +96,10 @@ export const ModelProviderSensitiveFields: Record = // Groq OBOT_GROQ_MODEL_PROVIDER_API_KEY: true, + // VLLM + OBOT_VLLM_MODEL_PROVIDER_ENDPOINT: false, + OBOT_VLLM_MODEL_PROVIDER_API_KEY: true, + // Anthropic Bedrock OBOT_ANTHROPIC_BEDROCK_MODEL_PROVIDER_ACCESS_KEY_ID: true, OBOT_ANTHROPIC_BEDROCK_MODEL_PROVIDER_SECRET_ACCESS_KEY: true, diff --git a/ui/admin/app/routes/_auth.model-providers.tsx b/ui/admin/app/routes/_auth.model-providers.tsx index f7755bee..65661bc9 100644 --- a/ui/admin/app/routes/_auth.model-providers.tsx +++ b/ui/admin/app/routes/_auth.model-providers.tsx @@ -33,6 +33,7 @@ const sortModelProviders = (modelProviders: ModelProvider[]) => { CommonModelProviderIds.OLLAMA, CommonModelProviderIds.VOYAGE, CommonModelProviderIds.GROQ, + CommonModelProviderIds.VLLM, ]; const aIndex = preferredOrder.indexOf(a.id); const bIndex = preferredOrder.indexOf(b.id); diff --git a/ui/admin/public/assets/vllm-logo.svg b/ui/admin/public/assets/vllm-logo.svg new file mode 100644 index 00000000..c7e03485 --- /dev/null +++ b/ui/admin/public/assets/vllm-logo.svg @@ -0,0 +1 @@ + \ No newline at end of file