forked from dheerajrhegde/PrecisionFarming
-
Notifications
You must be signed in to change notification settings - Fork 0
/
CropVectorStore.py
executable file
·50 lines (41 loc) · 2.16 KB
/
CropVectorStore.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
import re
from bs4 import BeautifulSoup
from langchain.text_splitter import RecursiveCharacterTextSplitter
from langchain_community.document_loaders import RecursiveUrlLoader, WebBaseLoader, PyPDFLoader
from langchain_community.vectorstores import Chroma
from langchain_community.vectorstores.utils import filter_complex_metadata
from langchain_openai import OpenAIEmbeddings
class CropVectorStore:
    """Build and persist a Chroma vector store from local crop-guide PDFs.

    Each PDF under ``Guides/`` is loaded, tagged with a ``crop`` metadata key
    derived from its filename, split into token-sized chunks, and embedded
    into a persistent Chroma collection named ``agriculture``.
    """

    def bs4_extractor(self, html: str) -> str:
        """Return the plain text of *html* with runs of blank lines collapsed.

        Kept as the ``extractor`` callback for ``RecursiveUrlLoader`` when
        crawling the extension sites instead of reading the local PDFs.
        """
        soup = BeautifulSoup(html, "lxml")
        # Collapse 3+ consecutive newlines down to a single blank line.
        return re.sub(r"\n\n+", "\n\n", soup.text).strip()

    def create_vector_store(self, crops=("soybean", "corn", "cotton")):
        """Load the guide PDF for each crop, chunk it, and index it in Chroma.

        Args:
            crops: iterable of filenames (without ``.pdf``) under ``Guides/``
                to index. Defaults to the three crops the project ships
                guides for, matching the original hard-coded behavior.
        """
        docs = []
        for crop in crops:
            # Local PDFs stand in for crawling the extension sites, e.g.:
            # RecursiveUrlLoader(f"https://{crop}.ces.ncsu.edu/", extractor=self.bs4_extractor)
            loader = PyPDFLoader(f"Guides/{crop}.pdf")
            docs.extend(loader.load())
            print(f"Loaded {crop}.pdf; total docs so far: {len(docs)}")

        # Tag each page with the crop it came from ("Guides/corn.pdf" -> "corn")
        # so retrieval can later be filtered per crop.
        for document in docs:
            document.metadata["crop"] = document.metadata["source"].split("/")[1].split(".")[0]
        if docs:  # guard: avoid IndexError when called with an empty crop list
            print(docs[0].metadata, type(docs[0]))

        # Token-based splitting keeps each chunk within the embedding model's
        # context window.
        text_splitter = RecursiveCharacterTextSplitter.from_tiktoken_encoder(
            chunk_size=1024, chunk_overlap=128
        )
        doc_splits = text_splitter.split_documents(docs)
        print("Splits done...", len(doc_splits))

        # Add to vectorDB
        vector_store = Chroma(
            collection_name="agriculture",
            embedding_function=OpenAIEmbeddings(),
            persist_directory="./chroma_langchain_db",  # Where to save data locally, remove if not necessary
        )
        # Chroma rejects non-scalar metadata values, so strip them first.
        vector_store.add_documents(filter_complex_metadata(doc_splits))
        # NOTE(review): with chromadb >= 0.4 persistence is automatic and
        # persist() is deprecated/removed -- confirm the installed version.
        vector_store.persist()
        print("Vectorstore created...")
# Build the vector store when this module is run as a script.
if __name__ == "__main__":
    CropVectorStore().create_vector_store()