diff --git a/application/core/settings.py b/application/core/settings.py index 6ae5475ce..bbd62fe4c 100644 --- a/application/core/settings.py +++ b/application/core/settings.py @@ -29,6 +29,7 @@ class Settings(BaseSettings): OPENAI_API_VERSION: Optional[str] = None # azure openai api version AZURE_DEPLOYMENT_NAME: Optional[str] = None # azure deployment name for answering AZURE_EMBEDDINGS_DEPLOYMENT_NAME: Optional[str] = None # azure deployment name for embeddings + OPENAI_BASE_URL: Optional[str] = None # openai base url for OpenAI-compatible models # elasticsearch ELASTIC_CLOUD_ID: Optional[str] = None # cloud id for elasticsearch diff --git a/application/llm/openai.py b/application/llm/openai.py index b1574dd1a..f85de6eae 100644 --- a/application/llm/openai.py +++ b/application/llm/openai.py @@ -2,25 +2,23 @@ from application.core.settings import settings + class OpenAILLM(BaseLLM): def __init__(self, api_key=None, user_api_key=None, *args, **kwargs): - global openai from openai import OpenAI super().__init__(*args, **kwargs) - self.client = OpenAI( - api_key=api_key, - ) + if settings.OPENAI_BASE_URL: + self.client = OpenAI( + api_key=api_key, + base_url=settings.OPENAI_BASE_URL + ) + else: + self.client = OpenAI(api_key=api_key) self.api_key = api_key self.user_api_key = user_api_key - def _get_openai(self): - # Import openai when needed - import openai - - return openai - def _raw_gen( self, baseself, @@ -29,7 +27,7 @@ def _raw_gen( stream=False, engine=settings.AZURE_DEPLOYMENT_NAME, **kwargs - ): + ): response = self.client.chat.completions.create( model=model, messages=messages, stream=stream, **kwargs ) @@ -44,7 +42,7 @@ def _raw_gen_stream( stream=True, engine=settings.AZURE_DEPLOYMENT_NAME, **kwargs - ): + ): response = self.client.chat.completions.create( model=model, messages=messages, stream=stream, **kwargs ) @@ -73,8 +71,3 @@ def __init__( api_base=settings.OPENAI_API_BASE, deployment_name=settings.AZURE_DEPLOYMENT_NAME, ) - - def 
_get_openai(self): - openai = super()._get_openai() - - return openai diff --git a/docs/pages/Guides/How-to-use-different-LLM.mdx b/docs/pages/Guides/How-to-use-different-LLM.mdx index 908ddb28e..7df77742e 100644 --- a/docs/pages/Guides/How-to-use-different-LLM.mdx +++ b/docs/pages/Guides/How-to-use-different-LLM.mdx @@ -36,6 +36,14 @@ List of latest supported LLMs are https://github.com/arc53/DocsGPT/blob/main/app Visit application/llm and select the file of your selected llm and there you will find the speicifc requirements needed to be filled in order to use it,i.e API key of that llm. +### For OpenAI-Compatible Endpoints: +DocsGPT supports the use of OpenAI-compatible endpoints through base URL substitution. This feature allows you to use alternative AI models or services that implement the OpenAI API interface. + + +Set OPENAI_BASE_URL in your environment: either add OPENAI_BASE_URL with the desired base URL to your .env file, or add it as an environment variable to the backend container in docker-compose.yml. + +> Make sure you have the right API_KEY and correct LLM_NAME. +