Commit
Fixes containers#562

Signed-off-by: Jeff MAURY <[email protected]>
Showing 5 changed files with 162 additions and 0 deletions.
9 changes: 9 additions & 0 deletions
recipes/natural_language_processing/function_calling/README.md
@@ -0,0 +1,9 @@
Currently, we need the fix from https://github.com/abetlen/llama-cpp-python/pull/1509:

```
pip install git+https://github.com/abetlen/llama-cpp-python.git@refs/pull/1509/head
```

Then get the path where the library is installed and add the `name` property at line 225 of:

/opt/app-root/lib64/python3.11/site-packages/llama_cpp/llama_types.py
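To locate that path quickly, the following minimal sketch (not part of the committed files) asks Python where the installed `llama_cpp` package lives:

```
# Illustrative helper, not part of this commit: print the location of
# llama_types.py in the active Python environment.
import os
import llama_cpp

print(os.path.join(os.path.dirname(llama_cpp.__file__), "llama_types.py"))
```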
27 changes: 27 additions & 0 deletions
recipes/natural_language_processing/function_calling/ai-lab.yaml
@@ -0,0 +1,27 @@
version: v1.0
application:
  type: language
  name: Function_Calling_Streamlit
  description: Function calling a remote service with a model service in a web frontend.
  containers:
    - name: llamacpp-server
      contextdir: ../../../model_servers/llamacpp_python
      containerfile: ./base/Containerfile
      model-service: true
      backend:
        - llama-cpp
      arch:
        - arm64
        - amd64
      ports:
        - 8001
      image: quay.io/ai-lab/llamacpp_python:latest
    - name: streamlit-function-calling-app
      contextdir: app
      containerfile: Containerfile
      arch:
        - arm64
        - amd64
      ports:
        - 8501
      image: quay.io/ai-lab/function-calling:latest
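As a side note, the recipe definition can be sanity-checked with a small PyYAML sketch (illustrative only, not part of this commit); it assumes PyYAML is installed and that the path below is relative to the repository root:

```
# Illustrative only, not part of this commit: list each container's image and ports.
import yaml

with open("recipes/natural_language_processing/function_calling/ai-lab.yaml") as f:
    recipe = yaml.safe_load(f)

for container in recipe["application"]["containers"]:
    print(container["name"], container["image"], container.get("ports", []))
```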
8 changes: 8 additions & 0 deletions
recipes/natural_language_processing/function_calling/app/Containerfile
@@ -0,0 +1,8 @@
FROM registry.access.redhat.com/ubi9/python-311:1-72.1722518949
WORKDIR /function-call
COPY requirements.txt .
RUN pip install --upgrade pip
RUN pip install --no-cache-dir --upgrade -r /function-call/requirements.txt
COPY *.py .
EXPOSE 8501
ENTRYPOINT [ "streamlit", "run", "app.py" ]
114 changes: 114 additions & 0 deletions
recipes/natural_language_processing/function_calling/app/app.py
@@ -0,0 +1,114 @@
from langchain_openai import ChatOpenAI
from langchain.chains import LLMChain
from langchain_core.prompts import ChatPromptTemplate, MessagesPlaceholder
from langchain_core.pydantic_v1 import BaseModel, Field
from langchain_core.output_parsers import PydanticToolsParser
from langchain.globals import set_debug, set_verbose
import streamlit as st
import requests
import time
import json
import os

from urllib3 import request

model_service = os.getenv("MODEL_ENDPOINT",
                          "http://localhost:8001")
model_service = f"{model_service}/v1"


@st.cache_resource(show_spinner=False)
def checking_model_service():
    # Poll until either a llama.cpp or an Ollama endpoint answers.
    start = time.time()
    print("Checking Model Service Availability...")
    ready = False
    while not ready:
        try:
            request_cpp = requests.get(f'{model_service}/models')
            request_ollama = requests.get(f'{model_service[:-2]}api/tags')
            if request_cpp.status_code == 200:
                server = "Llamacpp_Python"
                ready = True
            elif request_ollama.status_code == 200:
                server = "Ollama"
                ready = True
        except:
            pass
        time.sleep(1)
    print(f"{server} Model Service Available")
    print(f"{time.time()-start} seconds")
    return server


def get_models():
    # List the model names an Ollama server exposes (None for other servers).
    try:
        response = requests.get(f"{model_service[:-2]}api/tags")
        return [i["name"].split(":")[0] for i in
                json.loads(response.content)["models"]]
    except:
        return None


with st.spinner("Checking Model Service Availability..."):
    server = checking_model_service()


def enableInput():
    st.session_state["input_disabled"] = False


def disableInput():
    st.session_state["input_disabled"] = True


st.title("💬 Function calling")
if "input_disabled" not in st.session_state:
    enableInput()

model_name = os.getenv("MODEL_NAME", "")

if server == "Ollama":
    models = get_models()
    with st.sidebar:
        model_name = st.radio(label="Select Model",
                              options=models)


class getWeather(BaseModel):
    """Get the current weather in a given latitude and longitude."""

    latitude: float = Field(description="The latitude of a place")
    longitude: float = Field(description="The longitude of a place")

    # https://api.open-meteo.com/v1/forecast?latitude=52.52&longitude=13.41&hourly=temperature_2m
    def retrieve(self):
        return requests.get("https://api.open-meteo.com/v1/forecast",
                            params={'latitude': self.latitude,
                                    'longitude': self.longitude,
                                    'hourly': 'temperature_2m'}).json()


# OpenAI-compatible client pointed at the model service, with the weather tool bound.
llm = ChatOpenAI(base_url=model_service,
                 api_key="sk-no-key-required",
                 model=model_name,
                 streaming=False, verbose=False).bind_tools(tools=[getWeather], tool_choice='auto')

SYSTEM_MESSAGE = """
You are a helpful assistant.
You can call functions with appropriate input when necessary.
"""

prompt = ChatPromptTemplate.from_messages([
    ("system", SYSTEM_MESSAGE),
    ("user", "What's the weather like in {input} ?")
])

chain = prompt | llm | PydanticToolsParser(tools=[getWeather])

st.markdown("""
This demo application will ask the LLM for the weather in the city given in the input field and
specify a tool that can get weather information given a latitude and longitude. The weather
information retrieval is implemented using open-meteo.com.
""")
container = st.empty()

if prompt := st.chat_input(placeholder="Enter the city name:",
                           disabled=st.session_state["input_disabled"]):
    with container:
        st.write("Calling LLM")
    # The chain returns the parsed tool calls as getWeather instances.
    response = chain.invoke(prompt)
    with container:
        st.write("Retrieving weather information")
    temperatures = list(map(lambda r: r.retrieve(), response))
    print(temperatures[0])
    with container:
        st.line_chart(temperatures[0]['hourly'], x='time', y='temperature_2m')
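The tool-calling path that `bind_tools` relies on can also be exercised directly against the model service's OpenAI-compatible API. The sketch below is illustrative only (not part of this commit) and assumes the llamacpp-server container is reachable at http://localhost:8001:

```
# Illustrative only, not part of this commit: send a raw OpenAI-style tools request
# to the model service and print any tool calls the model produced.
import json
import requests

body = {
    "messages": [
        {"role": "system", "content": "You are a helpful assistant."},
        {"role": "user", "content": "What's the weather like in Berlin?"},
    ],
    "tools": [{
        "type": "function",
        "function": {
            "name": "getWeather",
            "description": "Get the current weather in a given latitude and longitude.",
            "parameters": {
                "type": "object",
                "properties": {
                    "latitude": {"type": "number", "description": "The latitude of a place"},
                    "longitude": {"type": "number", "description": "The longitude of a place"},
                },
                "required": ["latitude", "longitude"],
            },
        },
    }],
    "tool_choice": "auto",
}

resp = requests.post("http://localhost:8001/v1/chat/completions", json=body, timeout=300)
print(json.dumps(resp.json()["choices"][0]["message"].get("tool_calls"), indent=2))
```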
4 changes: 4 additions & 0 deletions
recipes/natural_language_processing/function_calling/app/requirements.txt
@@ -0,0 +1,4 @@
langchain==0.2.3
langchain-openai==0.1.7
langchain-community==0.2.4
streamlit==1.34.0