Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account-related emails.

Already on GitHub? Sign in to your account

Add the anthropic interface #3

Merged
merged 1 commit into from
Jul 3, 2024
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
3 changes: 3 additions & 0 deletions .env.sample
Original file line number Diff line number Diff line change
@@ -0,0 +1,3 @@
ANTHROPIC_API_KEY=""
GROQ_API_KEY=""
OPENAI_API_KEY=""
2 changes: 1 addition & 1 deletion .pre-commit-config.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -8,4 +8,4 @@ repos:
# supported by your project here, or alternatively use
# pre-commit's default_language_version, see
# https://pre-commit.com/#top_level-default_language_version
language_version: python3.11
language_version: python3.12
2 changes: 1 addition & 1 deletion aimodels/client/completions.py
Original file line number Diff line number Diff line change
Expand Up @@ -31,7 +31,7 @@ def create(self, model=None, temperature=0, messages=None):
interface, model_name = self.topmost_instance.get_provider_interface(model)

return interface.chat_completion_create(
messages,
messages=messages,
model=model_name,
temperature=temperature,
)
4 changes: 2 additions & 2 deletions aimodels/client/multi_fm_client.py
Original file line number Diff line number Diff line change
@@ -1,7 +1,6 @@
"""MultiFMClient manages a Chat across multiple provider interfaces."""

from ..providers.openai_interface import OpenAIInterface
from ..providers.groq_interface import GroqInterface
from ..providers import AnthropicInterface, OpenAIInterface, GroqInterface
from .chat import Chat


Expand Down Expand Up @@ -30,6 +29,7 @@ def __init__(self):
self.all_factories = {
"openai": OpenAIInterface,
"groq": GroqInterface,
"anthropic": AnthropicInterface,
}

def get_provider_interface(self, model):
Expand Down
1 change: 1 addition & 0 deletions aimodels/framework/__init__.py
Original file line number Diff line number Diff line change
@@ -1,3 +1,4 @@
"""Provides the ProviderInterface for defining the interface that all FM providers must implement."""

from .provider_interface import ProviderInterface
from .chat_completion_response import ChatCompletionResponse
8 changes: 8 additions & 0 deletions aimodels/framework/chat_completion_response.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,8 @@
from aimodels.framework.choice import Choice


class ChatCompletionResponse:
    """Used to conform to the response model of OpenAI.

    Holds a list of ``Choice`` objects so callers can use the familiar
    ``response.choices[0].message.content`` access pattern regardless of
    which provider produced the completion.
    """

    def __init__(self, num_choices=1):
        """Create an empty response shell.

        Args:
        ----
            num_choices (int): Number of empty ``Choice`` slots to allocate.
                Defaults to 1, preserving the original single-choice behavior.

        """
        self.choices = [Choice() for _ in range(num_choices)]
6 changes: 6 additions & 0 deletions aimodels/framework/choice.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,6 @@
from aimodels.framework.message import Message


class Choice:
    """A single completion alternative, wrapping one ``Message``.

    Mirrors the shape of a choice entry in an OpenAI-style response.
    """

    def __init__(self):
        # Start with an empty Message for the provider adapter to populate.
        self.message = Message()
6 changes: 6 additions & 0 deletions aimodels/framework/message.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,6 @@
"""Interface to hold contents of api responses when they do not conform to the OpenAI style response"""


class Message:
    """Mutable holder for a single message's text content.

    Gives non-OpenAI provider responses the same ``.content`` attribute
    callers expect from an OpenAI-style message object.
    """

    def __init__(self):
        # No content until a provider response fills it in.
        self.content = None
2 changes: 2 additions & 0 deletions aimodels/providers/__init__.py
Original file line number Diff line number Diff line change
@@ -1,3 +1,5 @@
"""Provides the individual provider interfaces for each FM provider."""

from .openai_interface import OpenAIInterface
from .groq_interface import GroqInterface
from .anthropic_interface import AnthropicInterface
67 changes: 67 additions & 0 deletions aimodels/providers/anthropic_interface.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,67 @@
"""The interface to the Anthropic API."""

import os

from aimodels.framework import ProviderInterface, ChatCompletionResponse


class AnthropicInterface(ProviderInterface):
    """Implements the ProviderInterface for interacting with the Anthropic API."""

    # Anthropic's Messages API requires max_tokens; 4096 matches the
    # previously hard-coded cap.
    _DEFAULT_MAX_TOKENS = 4096

    def __init__(self):
        """Set up the Anthropic API client with the key from the user's environment."""
        # Deferred import so the package is only required when this
        # provider is actually used.
        import anthropic

        self.anthropic_client = anthropic.Anthropic(
            api_key=os.getenv("ANTHROPIC_API_KEY")
        )

    def chat_completion_create(self, messages=None, model=None, temperature=0):
        """Request chat completions from the Anthropic API.

        Args:
        ----
            messages (list of dict): A list of message objects in chat history.
            model (str): Identifies the specific provider/model to use.
            temperature (float): The temperature to use in the completion.

        Returns:
        -------
            The ChatCompletionResponse with the completion result.

        """
        anthropic_messages = []
        system_message = None
        # Tolerate the declared default of messages=None instead of raising
        # TypeError when iterating.
        for msg in messages or []:
            if "role" in msg and "content" in msg:
                if msg["role"] == "system":
                    # Anthropic takes the system prompt as a separate kwarg,
                    # not as an entry in the messages list.
                    system_message = msg["content"]
                else:
                    temp_msg = msg.copy()
                    # Anthropic expects content as a list of typed blocks.
                    temp_msg["content"] = [
                        {"type": "text", "text": temp_msg["content"]}
                    ]
                    anthropic_messages.append(temp_msg)

        # Build kwargs once and add `system` only when present, instead of
        # duplicating the whole create() call in two branches.
        request_kwargs = {
            "messages": anthropic_messages,
            "model": model,
            "max_tokens": self._DEFAULT_MAX_TOKENS,
            "temperature": temperature,
        }
        if system_message is not None:
            request_kwargs["system"] = system_message

        response = self.anthropic_client.messages.create(**request_kwargs)

        # Adapt the Anthropic response to the OpenAI-style shape.
        chat_completion_response = ChatCompletionResponse()
        chat_completion_response.choices[0].message.content = response.content[0].text

        return chat_completion_response
92 changes: 92 additions & 0 deletions examples/test_anthropic.ipynb
Original file line number Diff line number Diff line change
@@ -0,0 +1,92 @@
{
"cells": [
{
"cell_type": "code",
"id": "initial_id",
"metadata": {
"collapsed": true,
"ExecuteTime": {
"end_time": "2024-07-02T23:20:19.015491Z",
"start_time": "2024-07-02T23:20:19.004272Z"
}
},
"source": [
"import sys\n",
"sys.path.append('../aimodels')\n",
"\n",
"from dotenv import load_dotenv, find_dotenv\n",
"\n",
"load_dotenv(find_dotenv())"
],
"outputs": [
{
"data": {
"text/plain": [
"True"
]
},
"execution_count": 1,
"metadata": {},
"output_type": "execute_result"
}
],
"execution_count": 1
},
{
"metadata": {
"ExecuteTime": {
"end_time": "2024-07-03T02:22:26.282827Z",
"start_time": "2024-07-03T02:22:18.193996Z"
}
},
"cell_type": "code",
"source": [
"from aimodels.client import MultiFMClient\n",
"\n",
"client = MultiFMClient()\n",
"model_string = \"anthropic:claude-3-opus-20240229\"\n",
"messages=[{\"role\": \"system\", \"content\": \"Respond in Pirate English.\"}, \n",
" {\"role\": \"user\", \"content\": \"Tell me a joke\"} ] \n",
"\n",
"response = client.chat.completions.create(model=model_string, messages=messages)\n",
"print(response.choices[0].message.content)"
],
"id": "adebd2f0b578a909",
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"Arrr, me bucko, 'ere be a jolly jest fer ye!\n",
"\n",
"What did th' pirate say on 'is 80th birthday? \"Aye matey!\"\n",
"\n",
"Ye see, it be a play on words, as \"Aye matey\" sounds like \"I'm eighty\". Har har har! 'Tis a clever bit o' pirate humor, if I do say so meself. Now, 'ow about ye fetch me a mug o' grog while I spin ye another yarn?\n"
]
}
],
"execution_count": 6
}
],
"metadata": {
"kernelspec": {
"display_name": "Python 3",
"language": "python",
"name": "python3"
},
"language_info": {
"codemirror_mode": {
"name": "ipython",
"version": 2
},
"file_extension": ".py",
"mimetype": "text/x-python",
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython2",
"version": "2.7.6"
}
},
"nbformat": 4,
"nbformat_minor": 5
}
Loading
Loading