Skip to content

Commit

Permalink
Critical Bugfix
Browse files Browse the repository at this point in the history
  • Loading branch information
LyubomirT committed Dec 7, 2023
1 parent 92e32d9 commit 187676c
Show file tree
Hide file tree
Showing 3 changed files with 10 additions and 8 deletions.
3 changes: 2 additions & 1 deletion .gitignore
Original file line number Diff line number Diff line change
Expand Up @@ -2,4 +2,5 @@ __pycache__/
temp/
PYPI.md
pypi/
dist/
dist/
demo_gui.py
2 changes: 1 addition & 1 deletion demo.py
Original file line number Diff line number Diff line change
Expand Up @@ -19,7 +19,7 @@ def load_config():

def demo():
wordlist_path = "small_wordlist.txt" # Update with the actual path to your wordlist. Or use the pre-installed small wordlist.
proofreader = Proofreader(wordlist_path)
proofreader = Proofreader(wordlist_path=wordlist_path)

while True:
word = input("Enter a word: ")
Expand Down
13 changes: 7 additions & 6 deletions lesp/autocorrect.py
Original file line number Diff line number Diff line change
Expand Up @@ -43,13 +43,14 @@ class Proofreader:
"""
def __init__(self, wordlist_path: str = "lesp-wordlist.txt", cache_file: str = "lesp_cache/lesp.cache") -> None:
    """
    Initialize the Proofreader.

    Loads the wordlist from ``wordlist_path`` immediately, and loads the
    similarity cache from ``cache_file`` if a cache path is given.

    :param wordlist_path: Path to the wordlist file to load.
    :param cache_file: Path to the cache file; a falsy value disables caching.
    """
    self.wordlist_path: str = wordlist_path
    # Initialize the attribute BEFORE load_wordlist() populates it.
    # Bug fix: the wordlist must never be reset to [] after loading,
    # otherwise every lookup sees an empty wordlist.
    self.wordlist: List[str] = []
    self.load_wordlist()
    self.cache_file: str = cache_file
    self.cache: dict = {}
    if cache_file:
        self.load_cache(cache_file)


def load_wordlist(self) -> None:
"""
Loads a wordlist, also can be used to dynamically switch between wordlists. The wordlist path is based on the wordlist_path attribute of the Proofreader object.
Expand Down Expand Up @@ -251,26 +252,26 @@ def get_similar(self, word: str, similarity_rate: float, chunks: int = 4, upto:

word = word.lower()
similar_words: List[str] = []
chunk_size: int = len(self.wordlist) // chunks
chunk_size = len(self.wordlist) // chunks

chunks = [(word, similarity_rate, self.wordlist[i:i + chunk_size]) for i in range(0, len(self.wordlist), chunk_size)]

if use_cache and self.cache and self.cache_file and word in self.cache:
if self.cache[word] != []:
return self.cache[word][:upto]
else:
return None

chunks: List[tuple] = [(word, similarity_rate, self.wordlist[i:i + chunk_size]) for i in range(0, len(self.wordlist), chunk_size)]

with concurrent.futures.ThreadPoolExecutor() as executor:
results: List[List[str]] = list(executor.map(Proofreader.get_similar_worker, chunks))
results: List[List[str]] = list(executor.map(Proofreader.get_similar_worker, chunks))


for similar_word_list in results:
similar_words.extend(similar_word_list)

similar_words = list(set(similar_words))

if set_cache and self.cache_file and word not in self.cache:
print("Setting cache for \"" + word + "\"")
self.cache[word] = similar_words
self.save_cache()

Expand Down

0 comments on commit 187676c

Please sign in to comment.