diff --git a/composer.json b/composer.json
index 025a65b..ba4bab6 100644
--- a/composer.json
+++ b/composer.json
@@ -19,7 +19,8 @@
         "symfony/property-info": "^6.4 || ^7.1",
         "symfony/serializer": "^6.4 || ^7.1",
         "symfony/type-info": "^6.4 || ^7.1",
-        "symfony/uid": "^6.4 || ^7.1"
+        "symfony/uid": "^6.4 || ^7.1",
+        "webmozart/assert": "^1.11"
     },
     "require-dev": {
         "codewithkyrian/chromadb-php": "^0.2.1",
diff --git a/examples/chat-gpt-azure.php b/examples/chat-gpt-azure.php
index a281de9..62a932a 100755
--- a/examples/chat-gpt-azure.php
+++ b/examples/chat-gpt-azure.php
@@ -16,7 +16,7 @@
     getenv('AZURE_OPENAI_VERSION'),
     getenv('AZURE_OPENAI_KEY')
 );
-$llm = new Gpt($runtime, Version::GPT_4o_MINI);
+$llm = new Gpt($runtime, Version::gpt4oMini());
 $chain = new Chain($llm);

 $messages = new MessageBag(
diff --git a/examples/chat-gpt-openai.php b/examples/chat-gpt-openai.php
index bcff84b..5da2f65 100755
--- a/examples/chat-gpt-openai.php
+++ b/examples/chat-gpt-openai.php
@@ -11,7 +11,7 @@
 require_once dirname(__DIR__).'/vendor/autoload.php';

 $runtime = new OpenAI(HttpClient::create(), getenv('OPENAI_API_KEY'));
-$llm = new Gpt($runtime, Version::GPT_4o_MINI);
+$llm = new Gpt($runtime, Version::gpt4oMini());
 $chain = new Chain($llm);

 $messages = new MessageBag(
diff --git a/examples/structured-output-math.php b/examples/structured-output-math.php
index 75fba68..1d316ea 100644
--- a/examples/structured-output-math.php
+++ b/examples/structured-output-math.php
@@ -17,7 +17,7 @@
 require_once dirname(__DIR__).'/vendor/autoload.php';

 $runtime = new OpenAI(HttpClient::create(), getenv('OPENAI_API_KEY'));
-$llm = new Gpt($runtime, Version::GPT_4o_MINI);
+$llm = new Gpt($runtime, Version::gpt4oMini());

 $responseFormatFactory = new ResponseFormatFactory(SchemaFactory::create());
 $serializer = new Serializer([new ObjectNormalizer()], [new JsonEncoder()]);
diff --git a/examples/toolbox-clock.php b/examples/toolbox-clock.php
index 5034d6b..c3ac58c 100755
--- a/examples/toolbox-clock.php
+++ b/examples/toolbox-clock.php
@@ -15,7 +15,7 @@
 require_once dirname(__DIR__).'/vendor/autoload.php';

 $runtime = new OpenAI(HttpClient::create(), getenv('OPENAI_API_KEY'));
-$llm = new Gpt($runtime, Version::GPT_4o_MINI);
+$llm = new Gpt($runtime, Version::gpt4oMini());

 $clock = new Clock(new SymfonyClock());
 $toolBox = new ToolBox(new ToolAnalyzer(), [$clock]);
diff --git a/examples/toolbox-serpapi.php b/examples/toolbox-serpapi.php
index 9e8472f..674281f 100755
--- a/examples/toolbox-serpapi.php
+++ b/examples/toolbox-serpapi.php
@@ -15,7 +15,7 @@

 $httpClient = HttpClient::create();
 $runtime = new OpenAI($httpClient, getenv('OPENAI_API_KEY'));
-$llm = new Gpt($runtime, Version::GPT_4o_MINI);
+$llm = new Gpt($runtime, Version::gpt4oMini());

 $serpApi = new SerpApi($httpClient, getenv('SERP_API_KEY'));
 $toolBox = new ToolBox(new ToolAnalyzer(), [$serpApi]);
diff --git a/examples/toolbox-weather.php b/examples/toolbox-weather.php
index 5597df1..a70060c 100755
--- a/examples/toolbox-weather.php
+++ b/examples/toolbox-weather.php
@@ -15,7 +15,7 @@

 $httpClient = HttpClient::create();
 $runtime = new OpenAI($httpClient, getenv('OPENAI_API_KEY'));
-$llm = new Gpt($runtime, Version::GPT_4o_MINI);
+$llm = new Gpt($runtime, Version::gpt4oMini());

 $wikipedia = new OpenMeteo($httpClient);
 $toolBox = new ToolBox(new ToolAnalyzer(), [$wikipedia]);
diff --git a/examples/toolbox-wikipedia.php b/examples/toolbox-wikipedia.php
index eb9acce..0f9fca1 100755
--- a/examples/toolbox-wikipedia.php
+++ b/examples/toolbox-wikipedia.php
@@ -15,7 +15,7 @@

 $httpClient = HttpClient::create();
 $runtime = new OpenAI($httpClient, getenv('OPENAI_API_KEY'));
-$llm = new Gpt($runtime, Version::GPT_4o_MINI);
+$llm = new Gpt($runtime, Version::gpt4oMini());

 $wikipedia = new Wikipedia($httpClient);
 $toolBox = new ToolBox(new ToolAnalyzer(), [$wikipedia]);
diff --git a/examples/toolbox-youtube.php b/examples/toolbox-youtube.php
index 458017e..cc5b96b 100755
--- a/examples/toolbox-youtube.php
+++ b/examples/toolbox-youtube.php
@@ -15,7 +15,7 @@

 $httpClient = HttpClient::create();
 $runtime = new OpenAI($httpClient, getenv('OPENAI_API_KEY'));
-$llm = new Gpt($runtime, Version::GPT_4o_MINI);
+$llm = new Gpt($runtime, Version::gpt4oMini());

 $transcriber = new YouTubeTranscriber($httpClient);
 $toolBox = new ToolBox(new ToolAnalyzer(), [$transcriber]);
diff --git a/src/Anthropic/Model/Claude.php b/src/Anthropic/Model/Claude.php
index ccd4fff..d66f8e8 100644
--- a/src/Anthropic/Model/Claude.php
+++ b/src/Anthropic/Model/Claude.php
@@ -11,21 +11,22 @@
 use PhpLlm\LlmChain\Response\Choice;
 use PhpLlm\LlmChain\Response\Response;

-final readonly class Claude implements LanguageModel
+final class Claude implements LanguageModel
 {
     public function __construct(
-        private ClaudeRuntime $runtime,
-        private Version $version = Version::SONNET_35,
-        private float $temperature = 1.0,
-        private int $maxTokens = 1000,
+        private readonly ClaudeRuntime $runtime,
+        private ?Version $version = null,
+        private readonly float $temperature = 1.0,
+        private readonly int $maxTokens = 1000,
     ) {
+        $this->version ??= Version::sonnet35();
     }

     public function call(MessageBag $messages, array $options = []): Response
     {
         $system = $messages->getSystemMessage();
         $body = [
-            'model' => $this->version->value,
+            'model' => $this->version->name,
             'temperature' => $this->temperature,
             'max_tokens' => $this->maxTokens,
             'system' => $system->content,
diff --git a/src/Anthropic/Model/Claude/Version.php b/src/Anthropic/Model/Claude/Version.php
index 12d054e..83beb2f 100644
--- a/src/Anthropic/Model/Claude/Version.php
+++ b/src/Anthropic/Model/Claude/Version.php
@@ -4,10 +4,33 @@

 namespace PhpLlm\LlmChain\Anthropic\Model\Claude;

-enum Version: string
+use Webmozart\Assert\Assert;
+
+final readonly class Version
 {
-    case HAIKU_3 = 'claude-3-haiku-20240307';
-    case SONNET_3 = 'claude-3-sonnet-20240229';
-    case SONNET_35 = 'claude-3-5-sonnet-20240620';
-    case OPUS = 'claude-3-opus-20240229';
+    public function __construct(
+        public string $name,
+    ) {
+        Assert::stringNotEmpty($name);
+    }
+
+    public static function haiku3(): self
+    {
+        return new self('claude-3-haiku-20240307');
+    }
+
+    public static function sonnet3(): self
+    {
+        return new self('claude-3-sonnet-20240229');
+    }
+
+    public static function sonnet35(): self
+    {
+        return new self('claude-3-5-sonnet-20240620');
+    }
+
+    public static function opus(): self
+    {
+        return new self('claude-3-opus-20240229');
+    }
 }
diff --git a/src/OpenAI/Model/Embeddings.php b/src/OpenAI/Model/Embeddings.php
index 0677aaf..f293821 100644
--- a/src/OpenAI/Model/Embeddings.php
+++ b/src/OpenAI/Model/Embeddings.php
@@ -9,12 +9,13 @@
 use PhpLlm\LlmChain\OpenAI\Model\Embeddings\Version;
 use PhpLlm\LlmChain\OpenAI\Runtime;

-final readonly class Embeddings implements EmbeddingModel
+final class Embeddings implements EmbeddingModel
 {
     public function __construct(
-        private Runtime $runtime,
-        private Version $version = Version::EMBEDDING_3_SMALL,
+        private readonly Runtime $runtime,
+        private ?Version $version = null,
     ) {
+        $this->version ??= Version::textEmbedding3Small();
     }

     public function create(string $text): Vector
@@ -37,12 +38,12 @@ public function multiCreate(array $texts): array
     }
     /**
-     * @return array{model: string, input: string}
+     * @return array{model: non-empty-string, input: string}
      */
     private function createBody(string $text): array
     {
         return [
-            'model' => $this->version->value,
+            'model' => $this->version->name,
             'input' => $text,
         ];
     }
 }
diff --git a/src/OpenAI/Model/Embeddings/Version.php b/src/OpenAI/Model/Embeddings/Version.php
index 7589df3..f56c0ff 100644
--- a/src/OpenAI/Model/Embeddings/Version.php
+++ b/src/OpenAI/Model/Embeddings/Version.php
@@ -4,9 +4,31 @@

 namespace PhpLlm\LlmChain\OpenAI\Model\Embeddings;

-enum Version: string
+use Webmozart\Assert\Assert;
+
+final readonly class Version
 {
-    case EMBEDDING_ADA_002 = 'text-embedding-ada-002';
-    case EMBEDDING_3_LARGE = 'text-embedding-3-large';
-    case EMBEDDING_3_SMALL = 'text-embedding-3-small';
+    /**
+     * @param non-empty-string $name
+     */
+    public function __construct(
+        public string $name,
+    ) {
+        Assert::stringNotEmpty($name);
+    }
+
+    public static function textEmbeddingAda002(): self
+    {
+        return new self('text-embedding-ada-002');
+    }
+
+    public static function textEmbedding3Large(): self
+    {
+        return new self('text-embedding-3-large');
+    }
+
+    public static function textEmbedding3Small(): self
+    {
+        return new self('text-embedding-3-small');
+    }
 }
diff --git a/src/OpenAI/Model/Gpt.php b/src/OpenAI/Model/Gpt.php
index 85596dd..f87c837 100644
--- a/src/OpenAI/Model/Gpt.php
+++ b/src/OpenAI/Model/Gpt.php
@@ -12,19 +12,20 @@
 use PhpLlm\LlmChain\Response\Response;
 use PhpLlm\LlmChain\Response\ToolCall;

-final readonly class Gpt implements LanguageModel
+final class Gpt implements LanguageModel
 {
     public function __construct(
-        private Runtime $runtime,
-        private Version $version = Version::GPT_4o,
-        private float $temperature = 1.0,
+        private readonly Runtime $runtime,
+        private ?Version $version = null,
+        private readonly float $temperature = 1.0,
     ) {
+        $this->version ??= Version::gpt4o();
     }

     public function call(MessageBag $messages, array $options = []): Response
     {
         $body = [
-            'model' => $this->version->value,
+            'model' => $this->version->name,
             'temperature' => $this->temperature,
             'messages' => $messages,
         ];
@@ -41,7 +42,7 @@ public function supportsToolCalling(): bool

     public function supportsStructuredOutput(): bool
     {
-        return $this->version->supportsStructuredOutput();
+        return $this->version->supportStructuredOutput;
     }

     /**
diff --git a/src/OpenAI/Model/Gpt/Version.php b/src/OpenAI/Model/Gpt/Version.php
index 2c48448..8184460 100644
--- a/src/OpenAI/Model/Gpt/Version.php
+++ b/src/OpenAI/Model/Gpt/Version.php
@@ -4,19 +4,57 @@

 namespace PhpLlm\LlmChain\OpenAI\Model\Gpt;

-enum Version: string
+use Webmozart\Assert\Assert;
+
+final readonly class Version
 {
-    case GPT_35_TURBO = 'gpt-3.5-turbo';
-    case GPT_35_TURBO_INSTRUCT = 'gpt-3.5-turbo-instruct';
-    case GPT_4 = 'gpt-4';
-    case GPT_4_TURBO = 'gpt-4-turbo';
-    case GPT_4o = 'gpt-4o';
-    case GPT_4o_MINI = 'gpt-4o-mini';
-    case o1_MINI = 'o1-mini';
-    case o1_PREVIEW = 'o1-preview';
-
-    public function supportsStructuredOutput(): bool
-    {
-        return self::GPT_4o === $this || self::GPT_4o_MINI === $this;
+    /**
+     * @param non-empty-string $name
+     */
+    public function __construct(
+        public string $name,
+        public bool $supportStructuredOutput,
+    ) {
+        Assert::stringNotEmpty($name);
+    }
+
+    public static function gpt35Turbo(): self
+    {
+        return new self('gpt-3.5-turbo', false);
+    }
+
+    public static function gpt35TurboInstruct(): self
+    {
+        return new self('gpt-3.5-turbo-instruct', false);
+    }
+
+    public static function gpt4(): self
+    {
+        return new self('gpt-4', false);
+    }
+
+    public static function gpt4Turbo(): self
+    {
+        return new self('gpt-4-turbo', false);
+    }
+
+    public static function gpt4o(): self
+    {
+        return new self('gpt-4o', true);
+    }
+
+    public static function gpt4oMini(): self
+    {
+        return new self('gpt-4o-mini', true);
+    }
+
+    public static function o1Mini(): self
+    {
+        return new self('o1-mini', false);
+    }
+
+    public static function o1Preview(): self
+    {
+        return new self('o1-preview', false);
     }
 }
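Usage after this change, sketched from the constructors introduced above; the `$runtime` value and the `'gpt-4o-2024-08-06'` model string are illustrative placeholders, not part of this diff:

    use PhpLlm\LlmChain\OpenAI\Model\Gpt;
    use PhpLlm\LlmChain\OpenAI\Model\Gpt\Version;

    // Named constructor, as in the updated examples above.
    $llm = new Gpt($runtime, Version::gpt4oMini());

    // Unlike the old enum, the value object accepts any non-empty model name;
    // the second argument flags structured-output support.
    $llm = new Gpt($runtime, new Version('gpt-4o-2024-08-06', true));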