forked from br0hit/Br0GPT
-
Notifications
You must be signed in to change notification settings - Fork 0
/
Copy pathanswer_generator.py
130 lines (91 loc) · 4.53 KB
/
answer_generator.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
# Answer-generation setup: loads a persisted FAISS index, wires an embedding
# retriever and a streaming OpenAI prompt node into a Haystack query pipeline.
# Using a FAISS document store
import os
from haystack.document_stores import FAISSDocumentStore
# Load the saved index into a new DocumentStore instance:
# Also, provide `config_path` parameter if you set it when calling the `save()` method:
document_store = FAISSDocumentStore.load(index_path="docstore/my_index.faiss", config_path="docstore/my_config.json")
# Check that the DocumentStore loaded with the expected "Flat" FAISS index type.
# NOTE(review): `assert` is stripped when Python runs with -O; raise an explicit
# exception here instead if this check must always run.
assert document_store.faiss_index_factory_str == "Flat"
# Initializing the prompt node from the start to avoid delays later:
from haystack.nodes import PromptNode,PromptTemplate
from haystack.pipelines import Pipeline
# # Initializing agent and tools (unused — kept from an earlier experiment)
# from haystack.agents import Agent, Tool
# from haystack.agents.base import ToolsManager
from haystack.nodes import EmbeddingRetriever
# Dense retriever over the loaded store. NOTE(review): the embedding model here
# must match the one used when document embeddings were written to the index —
# confirm against the indexing script.
retriever = EmbeddingRetriever(
document_store=document_store, embedding_model="sentence-transformers/multi-qa-mpnet-base-dot-v1"
)
# This embedding retriever gave the wrong file for "What are the different billing methods in SD ??"
# from haystack.nodes import BM25Retriever
# retriever = BM25Retriever(document_store=document_store, top_k=2)
# Earlier simple Q&A template, superseded by lfqa_prompt below:
# qa_template = PromptTemplate(
# name="Question_and_Answer",
# prompt_text="""
# You are an AI assistant. Your task is to use the content to give a detailed and easily understandable answer
# Content: {input}\n\n
# Answer:
# """
# )
# Long-form QA prompt: at run time the pipeline fills {join(documents)} with the
# retrieved documents' text and {query} with the user's question.
lfqa_prompt = PromptTemplate(
name="lfqa",
prompt_text="""Synthesize a comprehensive answer from the following text for the given question.
Provide a clear and concise response that summarizes the key points and information presented in the text.
Your answer should be in your own words and be no longer than 50 words.
\n\n Related text: {join(documents)} \n\n Question: {query} \n\n
Final Answer:""",
)
# NOTE(review): POSIX shells cannot export environment variables whose names
# contain "-", so this lookup likely always returns None; presumably "API_KEY"
# was intended — confirm how the key is actually provided before changing it.
api_key=os.environ.get("API-KEY")
# Streaming gpt-3.5-turbo node that renders lfqa_prompt with the retrieved docs.
prompt_node_working = PromptNode("gpt-3.5-turbo", api_key=api_key, default_prompt_template=lfqa_prompt,model_kwargs={"stream":True})
# prompt_node_working = PromptNode("openai-gpt", default_prompt_template=lfqa_prompt,model_kwargs={"stream":True})
# prompt_node = PromptNode("distilbert-base-cased-distilled-squad",default_prompt_template=lfqa_prompt,model_kwargs={"stream":True})
# from haystack.nodes import OpenAIAnswerGenerator
# generator = OpenAIAnswerGenerator(api_key="sk-REDACTED")  # SECURITY(review): a live-looking OpenAI key was committed here; it has been redacted from this comment — revoke the original key.
# from haystack.pipelines import GenerativeQAPipeline
# pipeline = GenerativeQAPipeline(generator=generator, retriever=retriever)
# result = pipeline.run(query='How to create a sales order', params={"Retriever": {"top_k": 1}})
# Query pipeline: Query -> Retriever (top-k documents) -> prompt_node (LLM answer).
query_pipeline = Pipeline()
query_pipeline.add_node(component=retriever, name="Retriever", inputs=["Query"])
query_pipeline.add_node(component=prompt_node_working, name="prompt_node", inputs=["Retriever"])
## This works perfectly for lfqa, Maybe
# Creating a function to integrate all this:
def question_answering_bot(input_question):
    """Answer *input_question* using the module-level retrieval+generation pipeline.

    Runs the query through `query_pipeline` (retriever -> prompt node) with the
    3 most relevant documents and returns the generated answers — the pipeline
    output's "results" list.
    """
    pipeline_output = query_pipeline.run(
        query=input_question,
        params={"Retriever": {"top_k": 3}},
    )
    return pipeline_output["results"]
# # Extract the 'content' value from each document
# contents = [doc.content for doc in result['documents']]
# # Print the contents
# for content in contents:
# print(content)
# # Extract the 'content' value from each document
# contents = [doc.content for doc in result['documents']]
# # Join all the content values into a single string
# joined_content = '\n'.join(contents)
# result = prompt_node.prompt(prompt_template=qa_template, input=joined_content)
# print(result)
# query_pipeline.add_node(component=prompt_node, name="prompt_node", inputs=["Retriever"])
# hotpot_questions = [
# "What are the different billing methods ?"
# ]
# for question in hotpot_questions:
# output = query_pipeline.run(query=question)
# print(output["results"])
# while(True):
# input_question = input("Enter the question which you want to ask the bot : ")
# if(input_question=="#"):
# print("thank you ")
# break
# else:
# question_answering_bot(input_question)
## Testing llms
# reply = question_answering_bot("Explain the policies of performance security bond ?")
# print("\n\n\n RESULT\n")
# print(reply)