Preemptive litellm fix
tnunamak committed Mar 20, 2024
1 parent 15de3fc commit 75bcb1c
Showing 3 changed files with 25 additions and 7 deletions.
10 changes: 5 additions & 5 deletions poetry.lock

Some generated files are not rendered by default, so the poetry.lock diff is not shown here.

2 changes: 1 addition & 1 deletion pyproject.toml
@@ -13,7 +13,7 @@ fastapi = "^0.109.0"
 uvicorn = "^0.27.0"
 humanize = "^4.9.0"
 llama-cpp-python = ">=0.2.26, <0.2.56"
-litellm = "^1.23.12"
+litellm = "1.23.9" # Newer versions than this break macOS builds: https://github.com/BerriAI/litellm/issues/2607
 txtai = {version = "^7.0.0", extras = ["pipeline-llm"]}
 sse-starlette = "^2.0.0"
 llama-index = "^0.10.4"
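Editor's aside, not part of the commit: assuming the project uses Poetry (it ships a pyproject.toml and poetry.lock), a manual version pin like this is usually followed by refreshing the lockfile, which is why poetry.lock changes alongside pyproject.toml. One way to do that after editing the constraint by hand:

poetry lock --no-update   # re-resolve litellm against the new "1.23.9" pin without bumping other dependencies
poetry install            # sync the local environment to the updated lockfile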
20 changes: 19 additions & 1 deletion selfie/__init__.py
@@ -3,6 +3,25 @@
 import logging
 import warnings
 import colorlog
+
+### Preemptive fix based on suggestion in https://github.com/BerriAI/litellm/issues/2607
+# import platform
+# import os
+#
+# os_name = platform.system()
+#
+# if os_name == 'Darwin': # macOS
+#     cache_dir = os.path.expanduser('~/Library/Caches/TikToken')
+# elif os_name == 'Windows':
+#     cache_dir = os.path.join(os.environ['APPDATA'], 'TikToken', 'Cache')
+# else: # Assume Linux/Unix
+#     cache_dir = os.path.expanduser('~/TikToken/Cache')
+#
+# # LiteLLM writes to a read-only directory in the built application bundle, try to override it
+# # Source: https://github.com/BerriAI/litellm/pull/1947, with the latest code here: https://github.com/BerriAI/litellm/blob/main/litellm/utils.py
+# os.environ['TIKTOKEN_CACHE_DIR'] = cache_dir
+#
+# # Now we can safely import litellm
 import litellm
 
 # Suppress specific warnings
@@ -18,5 +37,4 @@
 logging.basicConfig(level=logging.INFO)
 logging.getLogger("selfie").setLevel(level=logging.INFO)
 
-
 litellm.drop_params = True
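For reference, a minimal sketch of what the commented-out workaround would look like if enabled. It assumes litellm (via tiktoken) honors the TIKTOKEN_CACHE_DIR override described in https://github.com/BerriAI/litellm/pull/1947; the writable_tiktoken_cache_dir helper name and the makedirs call are illustrative additions, not part of the commit.

import os
import platform


def writable_tiktoken_cache_dir() -> str:
    # Pick a per-OS cache directory that a packaged (read-only) app bundle can write to.
    os_name = platform.system()
    if os_name == 'Darwin':  # macOS
        cache_dir = os.path.expanduser('~/Library/Caches/TikToken')
    elif os_name == 'Windows':
        cache_dir = os.path.join(os.environ['APPDATA'], 'TikToken', 'Cache')
    else:  # Assume Linux/Unix
        cache_dir = os.path.expanduser('~/TikToken/Cache')
    os.makedirs(cache_dir, exist_ok=True)  # ensure the directory exists (illustrative addition)
    return cache_dir


# The override must be in place before litellm (and, through it, tiktoken) is imported.
os.environ['TIKTOKEN_CACHE_DIR'] = writable_tiktoken_cache_dir()

import litellm  # noqa: E402 - import deliberately deferred until after the env var is set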