Skip to content

Commit

Permalink
Merge pull request #1112 from arc53/feat/openai-proxy
Browse files Browse the repository at this point in the history
feat: added easy way to proxy
  • Loading branch information
dartpain authored Sep 2, 2024
2 parents 736add0 + 817fc5d commit c4cb9b0
Show file tree
Hide file tree
Showing 3 changed files with 19 additions and 17 deletions.
1 change: 1 addition & 0 deletions application/core/settings.py
Original file line number Diff line number Diff line change
Expand Up @@ -29,6 +29,7 @@ class Settings(BaseSettings):
OPENAI_API_VERSION: Optional[str] = None # Azure OpenAI API version (only used for Azure deployments)
AZURE_DEPLOYMENT_NAME: Optional[str] = None # Azure deployment name used for answering/completions
AZURE_EMBEDDINGS_DEPLOYMENT_NAME: Optional[str] = None # Azure deployment name used for embeddings
OPENAI_BASE_URL: Optional[str] = None # base URL for OpenAI-compatible endpoints (proxies, local servers)

# elasticsearch
ELASTIC_CLOUD_ID: Optional[str] = None # cloud id for elasticsearch
Expand Down
27 changes: 10 additions & 17 deletions application/llm/openai.py
Original file line number Diff line number Diff line change
Expand Up @@ -2,25 +2,23 @@
from application.core.settings import settings



class OpenAILLM(BaseLLM):

def __init__(self, api_key=None, user_api_key=None, *args, **kwargs):
    """Initialize the OpenAI client.

    Honors ``settings.OPENAI_BASE_URL`` so OpenAI-compatible endpoints
    (proxies, local servers) can be used by base-URL substitution.

    Args:
        api_key: API key for OpenAI or a compatible service.
        user_api_key: Per-user API key, stored for downstream use.
    """
    # Imported lazily so the openai package is only required when this LLM is used.
    from openai import OpenAI

    super().__init__(*args, **kwargs)
    if settings.OPENAI_BASE_URL:
        # Point the client at an OpenAI-compatible endpoint.
        self.client = OpenAI(api_key=api_key, base_url=settings.OPENAI_BASE_URL)
    else:
        self.client = OpenAI(api_key=api_key)
    self.api_key = api_key
    self.user_api_key = user_api_key

def _get_openai(self):
    """Lazily import and return the ``openai`` module."""
    import openai as openai_module

    return openai_module

def _raw_gen(
self,
baseself,
Expand All @@ -29,7 +27,7 @@ def _raw_gen(
stream=False,
engine=settings.AZURE_DEPLOYMENT_NAME,
**kwargs
):
):
response = self.client.chat.completions.create(
model=model, messages=messages, stream=stream, **kwargs
)
Expand All @@ -44,7 +42,7 @@ def _raw_gen_stream(
stream=True,
engine=settings.AZURE_DEPLOYMENT_NAME,
**kwargs
):
):
response = self.client.chat.completions.create(
model=model, messages=messages, stream=stream, **kwargs
)
Expand Down Expand Up @@ -73,8 +71,3 @@ def __init__(
api_base=settings.OPENAI_API_BASE,
deployment_name=settings.AZURE_DEPLOYMENT_NAME,
)

def _get_openai(self):
    """Return the ``openai`` module via the parent class's lazy import."""
    return super()._get_openai()
8 changes: 8 additions & 0 deletions docs/pages/Guides/How-to-use-different-LLM.mdx
Original file line number Diff line number Diff line change
Expand Up @@ -36,6 +36,14 @@ List of latest supported LLMs are https://github.com/arc53/DocsGPT/blob/main/app
Visit application/llm and open the file for your chosen LLM; there you will find the specific requirements that must be filled in to use it, e.g. the API key for that LLM.
</Steps>

### For OpenAI-Compatible Endpoints:
DocsGPT supports the use of OpenAI-compatible endpoints through base URL substitution. This feature allows you to use alternative AI models or services that implement the OpenAI API interface.


Set `OPENAI_BASE_URL` in your environment: either add `OPENAI_BASE_URL` with the desired base URL to your `.env` file, or add it as an environment variable to the backend container in `docker-compose.yml`.

> Make sure you have the right API_KEY and correct LLM_NAME.



0 comments on commit c4cb9b0

Please sign in to comment.