Skip to content

Commit

Permalink
🔧 update settings to use CONTEXT_LENGTH instead of MAX_TOKENS
Browse files Browse the repository at this point in the history
  • Loading branch information
shroominic committed Dec 3, 2023
1 parent dd19fb2 commit 793d2d8
Show file tree
Hide file tree
Showing 2 changed files with 4 additions and 3 deletions.
4 changes: 2 additions & 2 deletions src/funcchain/chain/creation.py
Original file line number Diff line number Diff line change
Expand Up @@ -229,8 +229,8 @@ def _crop_large_inputs(
from funcchain import settings

content_tokens = count_tokens(v)
if base_tokens + content_tokens > settings.MAX_TOKENS:
input_kwargs[k] = v[: (settings.MAX_TOKENS - base_tokens) * 2 // 3]
if base_tokens + content_tokens > settings.CONTEXT_LENGTH:
input_kwargs[k] = v[: (settings.CONTEXT_LENGTH - base_tokens) * 2 // 3]
print("Truncated: ", len(input_kwargs[k]))


Expand Down
3 changes: 2 additions & 1 deletion src/funcchain/settings.py
Original file line number Diff line number Diff line change
Expand Up @@ -25,7 +25,8 @@ class FuncchainSettings(BaseSettings):
RETRY_PARSE_SLEEP: float = 0.1

# Prompt
MAX_TOKENS: int = 4096
CONTEXT_LENGTH: int = 8196
MAX_TOKENS: int = 2048
DEFAULT_SYSTEM_PROMPT: str = ""

# KEYS
Expand Down

0 comments on commit 793d2d8

Please sign in to comment.