diff --git a/llm-server/app.py b/llm-server/app.py index 3b917081d..15c9648a1 100644 --- a/llm-server/app.py +++ b/llm-server/app.py @@ -3,7 +3,7 @@ import requests from flask import Flask, request from langchain.chains.openai_functions import create_structured_output_chain -from langchain.chat_models import ChatOpenAI +from langchain.chat_models import ChatOpenAI, ChatLiteLLM from langchain.prompts import ChatPromptTemplate from langchain.utilities.openapi import OpenAPISpec @@ -67,7 +67,7 @@ def handle(): warnings.warn(str(e)) json_output = None - llm = ChatOpenAI(model="gpt-3.5-turbo-0613", temperature=0) + llm = ChatLiteLLM(model="gpt-3.5-turbo-0613", temperature=0) if json_output is None: prompt_msgs = non_api_base_prompt(base_prompt, text) diff --git a/llm-server/requirements.txt b/llm-server/requirements.txt index 44914d6a1..36539297c 100644 --- a/llm-server/requirements.txt +++ b/llm-server/requirements.txt @@ -46,6 +46,7 @@ jsonschema==4.19.0 jsonschema-specifications==2023.7.1 langchain==0.0.232 langcodes==3.3.0 +litellm==0.1.574 langsmith==0.0.5 loguru==0.7.1 manifest-ml==0.0.1