-
Notifications
You must be signed in to change notification settings - Fork 0
/
Copy pathchatbot.py
112 lines (88 loc) · 3.51 KB
/
chatbot.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
import nltk
from nltk.stem import WordNetLemmatizer
from nltk.corpus import stopwords
from sklearn.feature_extraction.text import TfidfVectorizer
from sklearn.metrics.pairwise import cosine_similarity
import langdetect
import spacy
from textblob import TextBlob
import requests
nltk.download('stopwords')
nltk.download('punkt')
class Chatbot:
    """Retrieval-style chatbot.

    Combines TF-IDF similarity over a text corpus, spaCy-based intent and
    entity extraction, TextBlob sentiment analysis, and an OpenWeatherMap
    lookup for weather queries.

    NOTE(review): several handlers invoked by get_response() --
    handle_order_intent, extract_location, generate_response_based_on_weather
    and generate_default_response -- are not defined in this file; they must
    be provided by a subclass or a later edit, or get_response will raise
    AttributeError on those paths.
    """

    def __init__(self, api_key=None, corpus_path='corpus.txt'):
        """Build the NLP pipeline and vectorize the corpus.

        Args:
            api_key: OpenWeatherMap API key used by fetch_weather().
                (Previously read from an undefined global ``api_key``,
                which raised NameError on every construction.)
            corpus_path: path to the newline-delimited corpus file.
        """
        # WordNetLemmatizer needs the 'wordnet' corpus; the module-level
        # downloads only fetch 'stopwords' and 'punkt'.
        nltk.download('wordnet', quiet=True)
        self.lemmatizer = WordNetLemmatizer()
        # A set gives O(1) membership tests in preprocess_input_text
        # (stopwords.words() returns a list).
        self.stop_words = set(stopwords.words('english'))
        self.corpus = self.load_corpus(corpus_path)
        self.vectorizer = TfidfVectorizer()
        self.tfidf_matrix = self.vectorizer.fit_transform(self.corpus)
        self.nlp = spacy.load('en_core_web_sm')
        # Reuse the already-loaded pipeline for NER instead of loading the
        # same model a second time.
        self.ner_model = self.nlp
        self.context = []  # list of (user_query, chatbot_response) pairs
        self.api_key = api_key

    def load_corpus(self, file_path):
        """Read one document per line and return the preprocessed list."""
        with open(file_path, 'r', encoding='utf-8') as file:
            return [self.preprocess_input_text(line.strip()) for line in file]

    def preprocess_input_text(self, input_text):
        """Normalize English text: lower-case, tokenize, drop stopwords,
        lemmatize. Non-English (or undetectable) text is returned unchanged.
        """
        try:
            lang = langdetect.detect(input_text)
        except langdetect.lang_detect_exception.LangDetectException:
            # Empty/very short strings cannot be language-detected;
            # pass them through rather than crash.
            return input_text
        if lang == 'en':
            input_text = input_text.lower()
            words = nltk.word_tokenize(input_text)
            words = [self.lemmatizer.lemmatize(word) for word in words
                     if word not in self.stop_words]
            input_text = ' '.join(words)
        elif lang == 'fr':
            # French preprocessing (not implemented)
            ...
        elif lang == 'es':
            # Spanish preprocessing (not implemented)
            ...
        return input_text

    def recognize_intent(self, input_text):
        """Return the lemma of the first verb in the text, or None.

        A crude intent heuristic: the first VERB token is taken as the
        intent label (e.g. 'order', 'weather' is matched only if tagged
        as a verb by the model).
        """
        doc = self.nlp(input_text)
        for token in doc:
            if token.pos_ == 'VERB':
                return token.lemma_
        return None

    def extract_entities(self, input_text):
        """Return a list of (entity_text, entity_label) pairs from spaCy NER."""
        doc = self.ner_model(input_text)
        return [(ent.text, ent.label_) for ent in doc.ents]

    def update_context(self, user_query, chatbot_response):
        """Append the latest exchange to the conversation history."""
        self.context.append((user_query, chatbot_response))

    def analyze_sentiment(self, input_text):
        """Return TextBlob polarity in [-1.0, 1.0]."""
        return TextBlob(input_text).sentiment.polarity

    def fetch_weather(self, location):
        """Query OpenWeatherMap for `location` and return the parsed JSON.

        Raises requests.exceptions on network failure/timeout; callers may
        also receive an API error payload as a dict.
        """
        endpoint = f"http://api.openweathermap.org/data/2.5/weather?q={location}&appid={self.api_key}"
        # Bound the request so a stalled server cannot hang the chatbot.
        response = requests.get(endpoint, timeout=10)
        return response.json()

    def handle_weather_intent(self, input_text):
        """Answer a weather query, or ask for a location if none was found.

        NOTE(review): relies on extract_location() and
        generate_response_based_on_weather(), which are not defined here.
        """
        location = self.extract_location(input_text)
        if location:
            weather_data = self.fetch_weather(location)
            response = self.generate_response_based_on_weather(weather_data)
        else:
            response = 'Please provide a valid location.'
        return response

    def get_response(self, input_text):
        """Produce a reply: preprocess, detect intent/entities/sentiment,
        dispatch to a handler, and record the exchange in context.
        """
        input_text = self.preprocess_input_text(input_text)
        intent = self.recognize_intent(input_text)
        entities = self.extract_entities(input_text)
        sentiment = self.analyze_sentiment(input_text)
        if intent == 'order':
            # NOTE(review): handle_order_intent is not defined in this file.
            response = self.handle_order_intent(input_text)
        elif intent == 'weather':
            response = self.handle_weather_intent(input_text)
        else:
            # NOTE(review): generate_default_response is not defined in
            # this file.
            response = self.generate_default_response(sentiment)
        self.update_context(input_text, response)
        return response