Merge pull request #43 from farizrahman4u/feature/azure_openai
OpenAI Azure API support
FayazRahman authored May 9, 2023
2 parents 93e1070 + 9082c09 commit c11f1ad
Showing 15 changed files with 250 additions and 54 deletions.
3 changes: 3 additions & 0 deletions docs/source/api/embeddings.rst
@@ -10,5 +10,8 @@ Embedding Providers
.. automodule:: loopgpt.embeddings.openai_
:members:

.. automodule:: loopgpt.embeddings.azure_openai
:members:

.. automodule:: loopgpt.embeddings.hf
:members:
3 changes: 3 additions & 0 deletions docs/source/api/models.rst
@@ -10,6 +10,9 @@ Model
.. automodule:: loopgpt.models.openai_
:members:

.. automodule:: loopgpt.models.azure_openai
:members:

.. automodule:: loopgpt.models.llama_cpp
:members:

15 changes: 7 additions & 8 deletions docs/source/conf.py
@@ -6,23 +6,22 @@
# -- Project information -----------------------------------------------------
# https://www.sphinx-doc.org/en/master/usage/configuration.html#project-information

project = 'LoopGPT'
copyright = '2023, Fariz Rahman, Fayaz Rahman'
author = 'Fariz Rahman, Fayaz Rahman'
release = '0.0.13'
project = "LoopGPT"
copyright = "2023, Fariz Rahman, Fayaz Rahman"
author = "Fariz Rahman, Fayaz Rahman"
release = "0.0.13"

# -- General configuration ---------------------------------------------------
# https://www.sphinx-doc.org/en/master/usage/configuration.html#general-configuration

extensions = ["sphinx.ext.autodoc"]

templates_path = ['_templates']
templates_path = ["_templates"]
exclude_patterns = []



# -- Options for HTML output -------------------------------------------------
# https://www.sphinx-doc.org/en/master/usage/configuration.html#options-for-html-output

html_theme = 'alabaster'
html_static_path = ['_static']
html_theme = "alabaster"
html_static_path = ["_static"]
33 changes: 24 additions & 9 deletions loopgpt/agent.py
@@ -8,17 +8,22 @@
AgentStates,
)
from loopgpt.memory import from_config as memory_from_config
from loopgpt.models import OpenAIModel, from_config as model_from_config
from loopgpt.models import (
OpenAIModel,
AzureOpenAIModel,
from_config as model_from_config,
)
from loopgpt.tools import builtin_tools, from_config as tool_from_config
from loopgpt.tools.code import ai_function
from loopgpt.memory.local_memory import LocalMemory
from loopgpt.embeddings import OpenAIEmbeddingProvider
from loopgpt.embeddings import OpenAIEmbeddingProvider, AzureOpenAIEmbeddingProvider
from loopgpt.utils.spinner import spinner
from loopgpt.loops import cli


from typing import *

import openai
import json
import time
import ast
@@ -34,16 +39,17 @@ class Agent:
:param goals: A list of goals for the agent. Defaults to None.
:type goals: list, optional
:param model: The model to use for the agent.
Strings are accepted only for OpenAI models. Specify a :class:`BaseModel` object for other models.
Strings are accepted only for OpenAI models. Specify a :class:`~loopgpt.models.base.BaseModel` object for other models.
Defaults to "gpt-3.5-turbo".
:type model: str, :class:`BaseModel`, optional
:type model: str, :class:`~loopgpt.models.base.BaseModel`, optional
:param embedding_provider: The embedding provider to use for the agent.
Defaults to :class:`OpenAIEmbeddingProvider`.
Specify a :class:`BaseEmbeddingProvider` object to use other embedding providers.
:type embedding_provider: :class:`BaseEmbeddingProvider`, optional
Defaults to :class:`~loopgpt.embeddings.OpenAIEmbeddingProvider`.
Specify a :class:`~loopgpt.embeddings.provider.BaseEmbeddingProvider` object to use other embedding providers.
:type embedding_provider: :class:`~loopgpt.embeddings.provider.BaseEmbeddingProvider`, optional
:param temperature: The temperature to use for agent's chat completion. Defaults to 0.8.
:type temperature: float, optional
"""

def __init__(
self,
name=DEFAULT_AGENT_NAME,
@@ -53,10 +59,18 @@ def __init__(
embedding_provider=None,
temperature=0.8,
):
if openai.api_type == "azure":
if model is None:
raise ValueError(
"You must provide an AzureOpenAIModel to the `model` argument when using the OpenAI Azure API"
)
if embedding_provider is None:
raise ValueError(
"You must provide a deployed embedding provider to the `embedding_provider` argument when using the OpenAI Azure API"
)

if model is None:
model = OpenAIModel("gpt-3.5-turbo")
elif isinstance(model, str):
model = OpenAIModel(model)

if embedding_provider is None:
embedding_provider = OpenAIEmbeddingProvider()
@@ -65,6 +79,7 @@ def __init__(
self.description = description
self.goals = goals or []
self.model = model
self.embedding_provider = embedding_provider
self.temperature = temperature
self.sub_agents = {}
self.memory = LocalMemory(embedding_provider=embedding_provider)
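
Note: after this change, building an ``Agent`` while ``openai.api_type`` is set to ``"azure"`` requires explicitly passing both a deployed model and a deployed embedding provider. A minimal sketch of the new flow, mirroring the docstring example added in ``loopgpt/models/azure_openai.py`` below (the deployment names are placeholders):

    import os

    import openai
    import loopgpt
    from loopgpt.models import AzureOpenAIModel
    from loopgpt.embeddings import AzureOpenAIEmbeddingProvider

    openai.api_type = "azure"
    openai.api_base = "https://<your deployment>.openai.azure.com/"
    openai.api_version = "2023-03-15-preview"
    openai.api_key = os.getenv("OPENAI_API_KEY")

    # Omitting either argument in Azure mode now raises a ValueError.
    agent = loopgpt.Agent(
        model=AzureOpenAIModel("my-gpt4-deployment"),
        embedding_provider=AzureOpenAIEmbeddingProvider("my-embeddings-deployment"),
    )
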
1 change: 1 addition & 0 deletions loopgpt/embeddings/__init__.py
@@ -1,5 +1,6 @@
from loopgpt.embeddings.provider import BaseEmbeddingProvider
from loopgpt.embeddings.openai_ import OpenAIEmbeddingProvider
from loopgpt.embeddings.azure_openai import AzureOpenAIEmbeddingProvider
from loopgpt.embeddings.hf import HuggingFaceEmbeddingProvider

user_providers = {}
46 changes: 46 additions & 0 deletions loopgpt/embeddings/azure_openai.py
@@ -0,0 +1,46 @@
import numpy as np
import openai
from loopgpt.embeddings.openai_ import OpenAIEmbeddingProvider
from typing import Optional

from loopgpt.utils.openai_key import get_openai_key


class AzureOpenAIEmbeddingProvider(OpenAIEmbeddingProvider):
"""Creates an Azure OpenAI embedding provider from a deployment ID. Can be created only when ``openai.api_type`` is set to ``azure``.
:param deployment_id: The deployment ID of the embedding provider.
:type deployment_id: str
:param api_key: The API key to use for the embedding provider.
If not specified, it will be found from ``openai.api_key`` or ``.env`` file or the ``OPENAI_API_KEY`` environment variable.
:type api_key: str, optional
.. note::
See :class:`AzureOpenAIModel <loopgpt.models.azure_openai.AzureOpenAIModel>` also.
"""

def __init__(self, deployment_id: str, api_key: Optional[str] = None):
# sanity check
assert (
openai.api_type == "azure"
), "AzureOpenAIModel can only be used with Azure API"

self.deployment_id = deployment_id
self.api_key = api_key

def get(self, text: str):
api_key = get_openai_key(self.api_key)
return np.array(
openai.Embedding.create(
input=[text], engine=self.deployment_id, api_key=api_key
)["data"][0]["embedding"],
dtype=np.float32,
)

def config(self):
cfg = {"deployment_id": self.deployment_id, "api_key": self.api_key}
return cfg

@classmethod
def from_config(cls, config):
return cls(config["deployment_id"], config["api_key"])
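
A quick sketch of how the new provider is intended to be used, assuming ``openai`` has already been configured for Azure as in the agent example above (the deployment name is a placeholder):

    from loopgpt.embeddings import AzureOpenAIEmbeddingProvider

    provider = AzureOpenAIEmbeddingProvider("my-embeddings-deployment")
    vector = provider.get("hello world")   # numpy float32 embedding vector
    # config()/from_config() round-trip the deployment ID and API key.
    restored = AzureOpenAIEmbeddingProvider.from_config(provider.config())
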
24 changes: 13 additions & 11 deletions loopgpt/embeddings/openai_.py
@@ -1,29 +1,31 @@
from typing import Optional
from loopgpt.embeddings.provider import BaseEmbeddingProvider
from loopgpt.utils.openai_key import get_openai_key
import numpy as np
import openai


class OpenAIEmbeddingProvider(BaseEmbeddingProvider):
def __init__(self, model: str = "text-embedding-ada-002"):
super(OpenAIEmbeddingProvider, self).__init__()
def __init__(
self, model: str = "text-embedding-ada-002", api_key: Optional[str] = None
):
self.model = model
self.api_key = api_key

def get(self, text: str):
import openai

api_key = get_openai_key(self.api_key)
return np.array(
openai.Embedding.create(input=[text], model="text-embedding-ada-002")[
"data"
][0]["embedding"],
openai.Embedding.create(
input=[text], model=self.model, api_key=api_key
)["data"][0]["embedding"],
dtype=np.float32,
)

def config(self):
cfg = super().config()
cfg.update({"model": self.model})
cfg.update({"model": self.model, "api_key": self.api_key})
return cfg

@classmethod
def from_config(cls, config):
obj = cls()
obj.model = config["model"]
return obj
return cls(config["model"], config.get("api_key"))
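
Sketch of what this change enables: the provider can now carry its own API key instead of relying only on the global ``openai.api_key``, and the key survives serialization (the key below is a placeholder):

    from loopgpt.embeddings import OpenAIEmbeddingProvider

    provider = OpenAIEmbeddingProvider(api_key="sk-placeholder")
    cfg = provider.config()                 # now includes "api_key" alongside "model"
    restored = OpenAIEmbeddingProvider.from_config(cfg)
    assert restored.api_key == provider.api_key
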
4 changes: 2 additions & 2 deletions loopgpt/embeddings/provider.py
@@ -2,8 +2,8 @@


class BaseEmbeddingProvider:
"""Base class for all embedding providers.
"""
"""Base class for all embedding providers."""

def get(self, text: str) -> np.ndarray:
raise NotImplementedError()

1 change: 1 addition & 0 deletions loopgpt/models/__init__.py
@@ -1,6 +1,7 @@
from loopgpt.models.stable_lm import StableLMModel
from loopgpt.models.llama_cpp import LlamaCppModel
from loopgpt.models.openai_ import OpenAIModel
from loopgpt.models.azure_openai import AzureOpenAIModel
from loopgpt.models.hf import HuggingFaceModel
from loopgpt.models.base import *

116 changes: 116 additions & 0 deletions loopgpt/models/azure_openai.py
@@ -0,0 +1,116 @@
from typing import List, Dict, Optional
from loopgpt.models.openai_ import OpenAIModel
from loopgpt.utils.openai_key import get_openai_key
from loopgpt.logger import logger
import time

from openai.error import RateLimitError
import requests
import openai


def get_deployment_details(endpoint, deployment_id, api_version, api_key):
api_key = get_openai_key(api_key)
response = requests.get(
f"{endpoint}/openai/deployments/{deployment_id}?api-version={api_version}",
headers={"api-key": api_key},
)
return response.json()


def get_deployment_model(endpoint, deployment_id, api_version, api_key):
details = get_deployment_details(endpoint, deployment_id, api_version, api_key)
model = details["model"]

return {
"gpt-35-turbo": "gpt-3.5-turbo",
"gpt-4": "gpt-4",
"gpt-4-32k": "gpt-4-32k",
}[model]


class AzureOpenAIModel(OpenAIModel):
"""Creates an Azure OpenAI model from a deployment ID. Can be created only when ``openai.api_type`` is set to ``azure``.
:param deployment_id: The deployment ID of the model.
:type deployment_id: str
:param api_key: The API key to use for the model.
If not specified, it will be found from ``openai.api_key`` or ``.env`` file or the ``OPENAI_API_KEY`` environment variable.
:type api_key: str, optional
:raises AssertionError: If ``openai.api_type`` is not set to ``azure``.
.. note::
You will also need an embedding provider deployed (e.g., text-embedding-ada-002) for creating an agent.
Example:
.. code-block:: python
import os
import openai
import loopgpt
from loopgpt.models import AzureOpenAIModel
from loopgpt.embeddings import AzureOpenAIEmbeddingProvider
openai.api_type = "azure"
openai.api_base = "https://<your deployment>.openai.azure.com/"
openai.api_version = "2023-03-15-preview"
openai.api_key = os.getenv("OPENAI_API_KEY")
model = AzureOpenAIModel("my-gpt4-deployment")
embedding_provider = AzureOpenAIEmbeddingProvider("my-embeddings-deployment")
agent = loopgpt.Agent(model=model, embedding_provider=embedding_provider)
agent.chat("Hello, how are you?")
"""

def __init__(self, deployment_id: str, api_key: Optional[str] = None):
# sanity check
assert (
openai.api_type == "azure"
), "AzureOpenAIModel can only be used with Azure API"

self.deployment_id = deployment_id
self.api_key = api_key
self.endpoint = openai.api_base
self.api_version = openai.api_version
self.model = get_deployment_model(
self.endpoint, self.deployment_id, self.api_version, self.api_key
)

def chat(
self,
messages: List[Dict[str, str]],
max_tokens: Optional[int] = None,
temperature: float = 0.8,
) -> str:
api_key = get_openai_key(self.api_key)
num_retries = 3
for _ in range(num_retries):
try:
resp = openai.ChatCompletion.create(
engine=self.deployment_id,
messages=messages,
api_key=api_key,
max_tokens=max_tokens,
temperature=temperature,
)["choices"][0]["message"]["content"]
return resp

except RateLimitError:
logger.warn("Rate limit exceeded. Retrying after 20 seconds.")
time.sleep(20)
continue

def config(self):
cfg = super().config()
cfg.update(
{
"deployment_id": self.deployment_id,
}
)
return cfg

@classmethod
def from_config(cls, config):
return cls(config["deployment_id"], config.get("api_key"))
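
For completeness, a sketch of the new model's serialization round trip, again assuming ``openai`` is configured for Azure and using a placeholder deployment name (constructing the model performs a request to the Azure deployments API to resolve the underlying model name):

    from loopgpt.models import AzureOpenAIModel

    model = AzureOpenAIModel("my-gpt4-deployment")
    cfg = model.config()                    # includes "deployment_id"
    restored = AzureOpenAIModel.from_config(cfg)
    reply = restored.chat([{"role": "user", "content": "Hello!"}])
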
4 changes: 2 additions & 2 deletions loopgpt/models/base.py
@@ -2,8 +2,8 @@


class BaseModel:
"""Base class for all models.
"""
"""Base class for all models."""

def chat(
self,
messages: List[Dict[str, str]],
