diff --git a/e2e.py b/e2e.py index 15a31a2..220e199 100644 --- a/e2e.py +++ b/e2e.py @@ -16,6 +16,7 @@ mistral, octoai, openai, + openrouter, perplexity, prem, replicate, @@ -75,6 +76,7 @@ def run_single_connector(connector_name: str) -> None: "deepinfra": (openai.OpenAIConnector, "DEEP_INFRA_API_KEY"), "perplexity": (perplexity.PerplexityAIConnector, "PERPLEXITY_API_KEY"), "anyscale": (anyscale.AnyscaleEndpointsConnector, "ANYSCALE_API_KEY"), + "openrouter": (openrouter.OpenRouterConnector, "OPENROUTER_API_KEY"), } if connector_name == "deepinfra": diff --git a/prem_utils/connectors/openrouter.py b/prem_utils/connectors/openrouter.py new file mode 100644 index 0000000..089f18c --- /dev/null +++ b/prem_utils/connectors/openrouter.py @@ -0,0 +1,72 @@ +from collections.abc import Sequence +from typing import Any + +from prem_utils.connectors.openai import OpenAIConnector + + +class OpenRouterConnector(OpenAIConnector): + def __init__( + self, api_key: str, base_url: str = "https://openrouter.ai/api/v1/", prompt_template: str | None = None + ) -> None: + super().__init__(prompt_template=prompt_template, base_url=base_url, api_key=api_key) + + def chat_completion( + self, + model: str, + messages: list[dict[str, Any]], + max_tokens: int = 512, + frequency_penalty: float = 0.1, + presence_penalty: float = 0, + seed: int | None = None, + stop: str | list[str] | None = None, + stream: bool = False, + temperature: float = 1, + top_p: float = 1, + tools: list[dict[str, Any]] | None = None, + tool_choice: dict | None = None, + ): + model = model.replace("openrouter/", "", 1) + + return super().chat_completion( + model=model, + messages=messages, + tools=tools, + tool_choice=tool_choice, + stream=stream, + max_tokens=max_tokens, + frequency_penalty=frequency_penalty, + presence_penalty=presence_penalty, + seed=seed, + stop=stop, + temperature=temperature, + top_p=top_p, + ) + + def embeddings( + self, + model: str, + input: str | Sequence[str] | Sequence[int] | Sequence[Sequence[int]], + 
encoding_format: str = "float", + user: str = None, + ): + raise NotImplementedError + + def finetuning( + self, model: str, training_data: list[dict], validation_data: list[dict] | None = None, num_epochs: int = 3 + ) -> str: + raise NotImplementedError + + def get_finetuning_job(self, job_id) -> dict[str, Any]: + raise NotImplementedError + + def generate_image( + self, + model: str, + prompt: str, + size: str = "1024x1024", + n: int = 1, + quality: str = "standard", + style: str = "vivid", + response_format: str = "url", + ): + raise NotImplementedError diff --git a/prem_utils/models.json b/prem_utils/models.json index 1db9787..2ad70cc 100644 --- a/prem_utils/models.json +++ b/prem_utils/models.json @@ -624,6 +624,387 @@ } ] }, + { + "provider": "openrouter", + "models": [ + { + "slug": "openrouter/openrouter/auto", + "model_type": "text2text", + "context_tokens": 128000 + }, + { + "slug": "openrouter/jondurbin/bagel-34b", + "model_type": "text2text", + "context_tokens": 128000 + }, + { + "slug": "openrouter/jebcarter/psyfighter-13b", + "model_type": "text2text", + "context_tokens": 128000 + }, + { + "slug": "openrouter/koboldai/psyfighter-13b-2", + "model_type": "text2text", + "context_tokens": 128000 + }, + { + "slug": "openrouter/neversleep/noromaid-mixtral-8x7b-instruct", + "model_type": "text2text", + "context_tokens": 128000 + }, + { + "slug": "openrouter/nousresearch/nous-hermes-llama2-13b", + "model_type": "text2text", + "context_tokens": 128000 + }, + { + "slug": "openrouter/meta-llama/codellama-34b-instruct", + "model_type": "text2text", + "context_tokens": 128000 + }, + { + "slug": "openrouter/phind/phind-codellama-34b", + "model_type": "text2text", + "context_tokens": 128000 + }, + { + "slug": "openrouter/intel/neural-chat-7b", + "model_type": "text2text", + "context_tokens": 128000 + }, + { + "slug": "openrouter/mistralai/mixtral-8x7b-instruct", + "model_type": "text2text", + "context_tokens": 128000 + }, + { + "slug": 
"openrouter/nousresearch/nous-hermes-2-mixtral-8x7b-dpo", + "model_type": "text2text", + "context_tokens": 128000 + }, + { + "slug": "openrouter/nousresearch/nous-hermes-2-mixtral-8x7b-sft", + "model_type": "text2text", + "context_tokens": 128000 + }, + { + "slug": "openrouter/haotian-liu/llava-13b", + "model_type": "text2text", + "context_tokens": 128000 + }, + { + "slug": "openrouter/nousresearch/nous-hermes-2-vision-7b", + "model_type": "text2text", + "context_tokens": 128000 + }, + { + "slug": "openrouter/meta-llama/llama-2-13b-chat", + "model_type": "text2text", + "context_tokens": 128000 + }, + { + "slug": "openrouter/migtissera/synthia-70b", + "model_type": "text2text", + "context_tokens": 128000 + }, + { + "slug": "openrouter/pygmalionai/mythalion-13b", + "model_type": "text2text", + "context_tokens": 128000 + }, + { + "slug": "openrouter/undi95/remm-slerp-l2-13b-6k", + "model_type": "text2text", + "context_tokens": 128000 + }, + { + "slug": "openrouter/gryphe/mythomax-l2-13b", + "model_type": "text2text", + "context_tokens": 128000 + }, + { + "slug": "openrouter/xwin-lm/xwin-lm-70b", + "model_type": "text2text", + "context_tokens": 128000 + }, + { + "slug": "openrouter/gryphe/mythomax-l2-13b-8k", + "model_type": "text2text", + "context_tokens": 128000 + }, + { + "slug": "openrouter/alpindale/goliath-120b", + "model_type": "text2text", + "context_tokens": 128000 + }, + { + "slug": "openrouter/neversleep/noromaid-20b", + "model_type": "text2text", + "context_tokens": 128000 + }, + { + "slug": "openrouter/gryphe/mythomist-7b", + "model_type": "text2text", + "context_tokens": 128000 + }, + { + "slug": "openrouter/mancer/weaver", + "model_type": "text2text", + "context_tokens": 128000 + }, + { + "slug": "openrouter/nousresearch/nous-capybara-7b", + "model_type": "text2text", + "context_tokens": 128000 + }, + { + "slug": "openrouter/codellama/codellama-70b-instruct", + "model_type": "text2text", + "context_tokens": 128000 + }, + { + "slug": 
"openrouter/teknium/openhermes-2-mistral-7b", + "model_type": "text2text", + "context_tokens": 128000 + }, + { + "slug": "openrouter/teknium/openhermes-2.5-mistral-7b", + "model_type": "text2text", + "context_tokens": 128000 + }, + { + "slug": "openrouter/undi95/remm-slerp-l2-13b", + "model_type": "text2text", + "context_tokens": 128000 + }, + { + "slug": "openrouter/undi95/toppy-m-7b", + "model_type": "text2text", + "context_tokens": 128000 + }, + { + "slug": "openrouter/openrouter/cinematika-7b", + "model_type": "text2text", + "context_tokens": 128000 + }, + { + "slug": "openrouter/01-ai/yi-34b-chat", + "model_type": "text2text", + "context_tokens": 128000 + }, + { + "slug": "openrouter/01-ai/yi-34b", + "model_type": "text2text", + "context_tokens": 128000 + }, + { + "slug": "openrouter/01-ai/yi-6b", + "model_type": "text2text", + "context_tokens": 128000 + }, + { + "slug": "openrouter/togethercomputer/stripedhyena-nous-7b", + "model_type": "text2text", + "context_tokens": 128000 + }, + { + "slug": "openrouter/togethercomputer/stripedhyena-hessian-7b", + "model_type": "text2text", + "context_tokens": 128000 + }, + { + "slug": "openrouter/mistralai/mixtral-8x7b", + "model_type": "text2text", + "context_tokens": 128000 + }, + { + "slug": "openrouter/nousresearch/nous-hermes-yi-34b", + "model_type": "text2text", + "context_tokens": 128000 + }, + { + "slug": "openrouter/nousresearch/nous-hermes-2-mistral-7b-dpo", + "model_type": "text2text", + "context_tokens": 128000 + }, + { + "slug": "openrouter/open-orca/mistral-7b-openorca", + "model_type": "text2text", + "context_tokens": 128000 + }, + { + "slug": "openrouter/huggingfaceh4/zephyr-7b-beta", + "model_type": "text2text", + "context_tokens": 128000 + }, + { + "slug": "openrouter/google/palm-2-chat-bison", + "model_type": "text2text", + "context_tokens": 128000 + }, + { + "slug": "openrouter/google/palm-2-codechat-bison", + "model_type": "text2text", + "context_tokens": 128000 + }, + { + "slug": 
"openrouter/google/palm-2-chat-bison-32k", + "model_type": "text2text", + "context_tokens": 128000 + }, + { + "slug": "openrouter/google/palm-2-codechat-bison-32k", + "model_type": "text2text", + "context_tokens": 128000 + }, + { + "slug": "openrouter/google/gemini-pro", + "model_type": "text2text", + "context_tokens": 128000 + }, + + { + "slug": "openrouter/perplexity/pplx-70b-online", + "model_type": "text2text", + "context_tokens": 128000 + }, + { + "slug": "openrouter/perplexity/pplx-7b-online", + "model_type": "text2text", + "context_tokens": 128000 + }, + { + "slug": "openrouter/perplexity/pplx-7b-chat", + "model_type": "text2text", + "context_tokens": 128000 + }, + { + "slug": "openrouter/perplexity/pplx-70b-chat", + "model_type": "text2text", + "context_tokens": 128000 + }, + { + "slug": "openrouter/meta-llama/llama-2-70b-chat", + "model_type": "text2text", + "context_tokens": 128000 + }, + { + "slug": "openrouter/nousresearch/nous-capybara-34b", + "model_type": "text2text", + "context_tokens": 128000 + }, + { + "slug": "openrouter/jondurbin/airoboros-l2-70b", + "model_type": "text2text", + "context_tokens": 128000 + }, + { + "slug": "openrouter/austism/chronos-hermes-13b", + "model_type": "text2text", + "context_tokens": 128000 + }, + { + "slug": "openrouter/mistralai/mistral-7b-instruct", + "model_type": "text2text", + "context_tokens": 128000 + }, + { + "slug": "openrouter/openchat/openchat-7b", + "model_type": "text2text", + "context_tokens": 128000 + }, + { + "slug": "openrouter/lizpreciatior/lzlv-70b-fp16-hf", + "model_type": "text2text", + "context_tokens": 128000 + }, + { + "slug": "openrouter/cognitivecomputations/dolphin-mixtral-8x7b", + "model_type": "text2text", + "context_tokens": 128000 + }, + { + "slug": "openrouter/rwkv/rwkv-5-world-3b", + "model_type": "text2text", + "context_tokens": 128000 + }, + { + "slug": "openrouter/recursal/rwkv-5-3b-ai-town", + "model_type": "text2text", + "context_tokens": 128000 + }, + { + "slug": 
"openrouter/recursal/eagle-7b", + "model_type": "text2text", + "context_tokens": 128000 + }, + { + "slug": "openrouter/google/gemma-7b-it", + "model_type": "text2text", + "context_tokens": 128000 + }, + { + "slug": "openrouter/anthropic/claude-2", + "model_type": "text2text", + "context_tokens": 128000 + }, + { + "slug": "openrouter/anthropic/claude-2.1", + "model_type": "text2text", + "context_tokens": 128000 + }, + { + "slug": "openrouter/anthropic/claude-2.0", + "model_type": "text2text", + "context_tokens": 128000 + }, + { + "slug": "openrouter/anthropic/claude-instant-1", + "model_type": "text2text", + "context_tokens": 128000 + }, + { + "slug": "openrouter/anthropic/claude-instant-1.2", + "model_type": "text2text", + "context_tokens": 128000 + }, + { + "slug": "openrouter/anthropic/claude-2:beta", + "model_type": "text2text", + "context_tokens": 128000 + }, + { + "slug": "openrouter/anthropic/claude-2.1:beta", + "model_type": "text2text", + "context_tokens": 128000 + }, + { + "slug": "openrouter/anthropic/claude-2.0:beta", + "model_type": "text2text", + "context_tokens": 128000 + }, + { + "slug": "openrouter/anthropic/claude-instant-1:beta", + "model_type": "text2text", + "context_tokens": 128000 + }, + { + "slug": "openrouter/mistralai/mistral-tiny", + "model_type": "text2text", + "context_tokens": 128000 + }, + { + "slug": "openrouter/mistralai/mistral-small", + "model_type": "text2text", + "context_tokens": 128000 + }, + { + "slug": "openrouter/mistralai/mistral-medium", + "model_type": "text2text", + "context_tokens": 128000 + } + ] + }, { "provider": "perplexity",