-
Notifications
You must be signed in to change notification settings - Fork 0
/
Copy pathquery.py
31 lines (24 loc) · 1.11 KB
/
query.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
from langchain_chroma import Chroma
from langchain_openai import OpenAIEmbeddings, OpenAI, ChatOpenAI
from dotenv import load_dotenv
import os
from langchain_core.prompts import PromptTemplate
from langchain_core.runnables import RunnablePassthrough
from langchain_core.output_parsers import StrOutputParser

# Load OPENAI_API_KEY (and any other settings) from a local .env file.
load_dotenv()
openai_api_key = os.getenv('OPENAI_API_KEY')

# Embedding model used to query the Chroma store; it must match the model
# that was used when the vectorstore at ./vectorstore was built, or the
# similarity search will return garbage.
embeddings = OpenAIEmbeddings(openai_api_key=openai_api_key, model="text-embedding-3-large")
vectordb = Chroma(persist_directory="./vectorstore", embedding_function=embeddings)

# BUG FIX: the original passed `search_kargs` (typo). LangChain silently
# ignores unknown keyword arguments here, so the retriever fell back to its
# default k (4) instead of the intended 1000.
# NOTE(review): k=1000 is very large for a chat-model context window — confirm
# the intended top-k against the corpus size and the model's token limit.
retriever = vectordb.as_retriever(search_type='similarity', search_kwargs={'k': 1000})

llm = ChatOpenAI(model="gpt-4-turbo", openai_api_key=openai_api_key)

# Prompt fed to the LLM: retrieved documents go into {context}, the user's
# question into {question}.
prompt_template = PromptTemplate.from_template("""
Mechanical, Electrical, and Plumbing (MEP) Engineering Question Answering System, maintenance related questions.
Context: {context}
Question: {question}
""")

# RAG pipeline (LCEL): retrieve context for the incoming question, fill the
# prompt, call the chat model, and parse the reply down to a plain string.
rag_chain = (
    {"context": retriever, "question": RunnablePassthrough()}
    | prompt_template
    | llm
    | StrOutputParser()
)