
Commit

fix: ollama reraise
Byaidu committed Dec 20, 2024
1 parent 86c1869 · commit 7692434
Showing 1 changed file with 17 additions and 29 deletions.
pdf2zh/translator.py: 46 changes (17 additions & 29 deletions)
@@ -213,37 +213,25 @@ def __init__(self, lang_in, lang_out, model, envs=None, prompt=None):
         self.prompttext = prompt
 
     def translate(self, text):
-        print(len(self.prompt(text, self.prompttext)))
-        print(self.prompt(text, self.prompttext)[0])
-        print(self.prompt(text, self.prompttext)[1])
-        maxlen = max(2000, len(text) * 3)
+        maxlen = max(2000, len(text) * 5)
         for model in self.model.split(";"):
-            for i in range(2):
-                if i:
-                    print("[Retry]")
-                try:
-                    response = ""
-                    stream = self.client.chat(
-                        model=model,
-                        options=self.options,
-                        messages=self.prompt(text, self.prompttext),
-                        stream=True,
-                    )
-                    for chunk in stream:
-                        chunk = chunk["message"]["content"]
-                        print(chunk, end="", flush=True)
-                        response += chunk
-                        if len(response) > maxlen:
-                            raise Exception("Response too long")
-                    if not response.endswith("\n"):
-                        print()
-                    return response.strip()
-                except Exception as e:
-                    print()
-                    print(e)
-        print("[Aborted.]")
-        # if translation fails after multiple retries, return the original text to prevent hang-up
-        return text
+            try:
+                response = ""
+                stream = self.client.chat(
+                    model=model,
+                    options=self.options,
+                    messages=self.prompt(text, self.prompttext),
+                    stream=True,
+                )
+                for chunk in stream:
+                    chunk = chunk["message"]["content"]
+                    response += chunk
+                    if len(response) > maxlen:
+                        raise Exception("Response too long")
+                return response.strip()
+            except Exception as e:
+                print(e)
+        raise Exception("All models failed")
 
 
 class OpenAITranslator(BaseTranslator):
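Note on the behavior change: translate() previously swallowed per-model failures and fell back to returning the untranslated input, whereas after this commit it re-raises once every model in the ";"-separated list has failed. A minimal caller-side sketch of how downstream code could restore a fallback if it wants one (hypothetical wrapper, not part of this commit):

    # Hypothetical wrapper, not part of this commit: with the re-raise in place,
    # the caller now decides whether a failed translation keeps the original text.
    def translate_or_keep_original(translator, text):
        try:
            return translator.translate(text)
        except Exception as e:
            # e.g. "Response too long" or "All models failed"
            print(f"[translate failed, keeping original] {e}")
            return text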
