
Commit

Merge pull request #363 from SylphAI-Inc/integrations
Multiple LLM provider integrations
Sylph-AI authored Feb 7, 2025
2 parents 3a66bb7 + f66a87f commit 6188d2a
Showing 19 changed files with 1,408 additions and 356 deletions.
27 changes: 26 additions & 1 deletion adalflow/adalflow/components/model_client/__init__.py
@@ -46,7 +46,27 @@
 )
 
 DeepSeekClient = LazyImport(
-    "adalflow.components.model_client.deepseek_client.DeepSeekClient", None
+    "adalflow.components.model_client.deepseek_client.DeepSeekClient",
+    OptionalPackages.OPENAI,
 )
 
+MistralClient = LazyImport(
+    "adalflow.components.model_client.mistral_client.MistralClient",
+    OptionalPackages.MISTRAL,
+)
+
+XAIClient = LazyImport(
+    "adalflow.components.model_client.xai_client.XAIClient", OptionalPackages.OPENAI
+)
+
+FireworksClient = LazyImport(
+    "adalflow.components.model_client.fireworks_client.FireworksClient",
+    OptionalPackages.FIREWORKS,
+)
+
+SambaNovaClient = LazyImport(
+    "adalflow.components.model_client.sambanova_client.SambaNovaClient",
+    OptionalPackages.OPENAI,
+)
+
 GoogleGenAIClient = LazyImport(
@@ -87,6 +107,11 @@
     "GoogleGenAIClient",
     "OllamaClient",
     "TogetherClient",
+    "DeepSeekClient",
+    "MistralClient",
+    "XAIClient",
+    "FireworksClient",
+    "SambaNovaClient",
 ]
 
 for name in __all__:
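The new clients follow the same LazyImport registration pattern as the existing ones and are exported via __all__, so user code can reach them from the package root without eagerly importing each provider SDK. A minimal import sketch, assuming the optional openai package is installed and the relevant API key (here MISTRAL_API_KEY) is set:

# Sketch: the newly exported clients are importable from the package root.
# LazyImport defers loading the underlying provider SDK until a client class
# is actually used, so the import line itself stays cheap.
from adalflow.components.model_client import (
    FireworksClient,
    MistralClient,
    SambaNovaClient,
    XAIClient,
)

client = MistralClient(input_type="messages")  # resolves the lazy import; needs MISTRAL_API_KEY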
53 changes: 53 additions & 0 deletions adalflow/adalflow/components/model_client/fireworks_client.py
@@ -0,0 +1,53 @@
from typing import Optional, Callable, Any, Literal
from openai.types import Completion
from adalflow.components.model_client.openai_client import OpenAIClient

BASE_URL = "https://api.fireworks.ai/inference/v1/"


class FireworksClient(OpenAIClient):
    __doc__ = r"""A component wrapper for Fireworks AI's OpenAI-compatible API.

    This class extends :class:`OpenAIClient` by customizing several key parameters:

    - Sets the API base URL to ``"https://api.fireworks.ai/inference/v1/"``.
    - Uses the environment variable ``"FIREWORKS_API_KEY"`` to obtain the API key.
    - Defaults the input type to ``"text"``; pass ``"messages"`` for multi-turn chat interactions.

    **Example usage with AdalFlow Generator:**

    .. code-block:: python

        from adalflow.core import Generator
        from adalflow.components.model_client.fireworks_client import FireworksClient

        generator = Generator(
            model_client=FireworksClient(),
            model_kwargs={
                "model": "accounts/fireworks/models/llama-v3p1-8b-instruct",
                "temperature": 0.7,
            }
        )

        prompt_kwargs = {
            "input_str": "Hello from Fireworks AI! Can you summarize the concept of quantum mechanics?"
        }

        response = generator(prompt_kwargs)
    """

    def __init__(
        self,
        api_key: Optional[str] = None,
        chat_completion_parser: Optional[Callable[[Completion], Any]] = None,
        input_type: Literal["text", "messages"] = "text",
        base_url: str = BASE_URL,
        env_api_key_name: str = "FIREWORKS_API_KEY",
    ):
        super().__init__(
            api_key=api_key,
            chat_completion_parser=chat_completion_parser,
            input_type=input_type,
            base_url=base_url,
            env_api_key_name=env_api_key_name,
        )
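fireworks_client.py stops at the constructor; a quick way to exercise it is to mirror the __main__ block that mistral_client.py (below) ships with. A minimal sketch, assuming FIREWORKS_API_KEY is set and the model named in the docstring is enabled for the account:

# Illustrative smoke test in the style of mistral_client.py's __main__ block.
# Assumes FIREWORKS_API_KEY is set in the environment (loaded via setup_env).
from adalflow.core import Generator
from adalflow.components.model_client.fireworks_client import FireworksClient
from adalflow.utils import setup_env

setup_env()

generator = Generator(
    model_client=FireworksClient(input_type="messages"),
    model_kwargs={
        "model": "accounts/fireworks/models/llama-v3p1-8b-instruct",
        "temperature": 0.7,
    },
)

response = generator({"input_str": "Summarize quantum mechanics in two sentences."})
print(response.error or response.data)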
101 changes: 101 additions & 0 deletions adalflow/adalflow/components/model_client/mistral_client.py
@@ -0,0 +1,101 @@
from typing import Optional, Any, Callable, Literal
from openai.types import Completion
from adalflow.components.model_client.openai_client import OpenAIClient

BASE_URL = "https://api.mistral.ai/v1"


class MistralClient(OpenAIClient):
    __doc__ = r"""A minimal Mistral client that inherits from :class:`OpenAIClient`.

    This client is designed to work with Mistral’s API by setting:

    - The API base URL to ``https://api.mistral.ai/v1``.
    - The API key is fetched from the environment variable ``MISTRAL_API_KEY`` if not provided.
    - The input format is supported as either ``"text"`` or ``"messages"``.
    - The AdalFlow Generator is expected to supply additional model parameters
      (such as model name, temperature, and max_tokens) in a single configuration point.

    **Example usage with the AdalFlow Generator:**

    .. code-block:: python

        import os
        from adalflow.core import Generator
        from adalflow.components.model_client.mistral_client import MistralClient
        from adalflow.utils import setup_env

        setup_env()

        generator = Generator(
            model_client=MistralClient(),
            model_kwargs={
                "model": "mistral-large-latest",
                "temperature": 0.7,
                "max_tokens": 2000,
            }
        )

        prompt_kwargs = {"input_str": "Explain the concept of machine learning."}
        response = generator(prompt_kwargs)
    """

    def __init__(
        self,
        api_key: Optional[str] = None,
        base_url: str = BASE_URL,
        input_type: Literal["text", "messages"] = "text",
        env_api_key_name: str = "MISTRAL_API_KEY",
        chat_completion_parser: Optional[Callable[[Completion], Any]] = None,
    ):
        """
        Initialize a MistralClient instance.

        :param api_key: Mistral API key. If None, reads from the environment variable ``MISTRAL_API_KEY``.
        :param base_url: URL for Mistral’s endpoint (default: ``https://api.mistral.ai/v1``).
        :param input_type: Input format, either ``"text"`` or ``"messages"``.
        :param env_api_key_name: Name of the environment variable to use for the Mistral API key (default: ``MISTRAL_API_KEY``).
        :param chat_completion_parser: Optional function to parse responses from Mistral's API.
        """
        super().__init__(
            api_key=api_key,
            chat_completion_parser=chat_completion_parser,
            input_type=input_type,
            base_url=base_url,
            env_api_key_name=env_api_key_name,
        )


if __name__ == "__main__":
    import os
    from adalflow.core import Generator
    from adalflow.utils import setup_env, get_logger

    # Set up logging and load environment variables.
    get_logger(enable_file=False)
    setup_env()

    # Instantiate the MistralClient; the API key is read from the environment if not explicitly provided.
    client = MistralClient(api_key=os.getenv("MISTRAL_API_KEY"), input_type="messages")

    # Create the Generator using the MistralClient and specify model parameters.
    generator = Generator(
        model_client=client,
        model_kwargs={
            "model": "mistral-large-latest",
            "temperature": 0.7,
            "max_tokens": 2000,
        },
    )

    # Define the prompt to be processed.
    prompt_kwargs = {"input_str": "Explain the concept of machine learning."}

    # Generate and output the response.
    response = generator(prompt_kwargs)

    if response.error:
        print(f"[Mistral] Generator Error: {response.error}")
    else:
        print(f"[Mistral] Response: {response.data}")
62 changes: 62 additions & 0 deletions adalflow/adalflow/components/model_client/sambanova_client.py
@@ -0,0 +1,62 @@
from typing import Optional, Any, Callable, Literal
from openai.types import Completion
from adalflow.components.model_client.openai_client import OpenAIClient

BASE_URL = "https://api.sambanova.ai/v1/"
__all__ = ["SambaNovaClient"]


class SambaNovaClient(OpenAIClient):
    __doc__ = r"""A component wrapper for SambaNova's OpenAI-compatible API.

    This client extends :class:`OpenAIClient` and customizes:

    - The API base URL, which defaults to ``"https://api.sambanova.ai/v1/"``.
    - The API key is read from the environment variable ``SAMBANOVA_API_KEY`` if not provided explicitly.

    **Example usage with the AdalFlow Generator:**

    .. code-block:: python

        from adalflow.core import Generator
        from adalflow.components.model_client.sambanova_client import SambaNovaClient

        generator = Generator(
            model_client=SambaNovaClient(),
            model_kwargs={
                "model": "Meta-Llama-3.1-8B-Instruct",
                "temperature": 0.7,
                "top_p": 0.9,
            },
        )

        prompt_kwargs = {
            "input_str": "Hello from SambaNova! Can you summarize the concept of quantum computing in simple terms?"
        }

        response = generator(prompt_kwargs)
    """

    def __init__(
        self,
        api_key: Optional[str] = None,
        chat_completion_parser: Optional[Callable[[Completion], Any]] = None,
        input_type: Literal["text", "messages"] = "text",
        base_url: str = BASE_URL,
        env_api_key_name: str = "SAMBANOVA_API_KEY",
    ):
        """
        Initialize a SambaNovaClient instance.

        :param api_key: (Optional) SambaNova API key. If not provided, the client attempts to read from the
            environment variable ``SAMBANOVA_API_KEY``.
        :param chat_completion_parser: (Optional) A custom function to parse SambaNova responses.
        :param input_type: Specifies the input format, either ``"text"`` or ``"messages"``. Defaults to ``"text"``.
        :param base_url: SambaNova API endpoint. Defaults to ``"https://api.sambanova.ai/v1/"``.
        :param env_api_key_name: The name of the environment variable holding the API key. Defaults to ``SAMBANOVA_API_KEY``.
        """
        super().__init__(
            api_key=api_key,
            chat_completion_parser=chat_completion_parser,
            input_type=input_type,
            base_url=base_url,
            env_api_key_name=env_api_key_name,
        )
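Unlike mistral_client.py, this module does not ship a __main__ demo. A minimal usage sketch in the same style, assuming SAMBANOVA_API_KEY is set and the model from the docstring is available to the account:

# Illustrative usage in the style of mistral_client.py's __main__ block.
# Assumes SAMBANOVA_API_KEY is set in the environment (loaded via setup_env).
from adalflow.core import Generator
from adalflow.components.model_client.sambanova_client import SambaNovaClient
from adalflow.utils import setup_env

setup_env()

generator = Generator(
    model_client=SambaNovaClient(input_type="messages"),
    model_kwargs={
        "model": "Meta-Llama-3.1-8B-Instruct",
        "temperature": 0.7,
        "top_p": 0.9,
    },
)

response = generator({"input_str": "Explain quantum computing in simple terms."})
print(response.error or response.data)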
