-
Notifications
You must be signed in to change notification settings - Fork 204
Commit
This commit does not belong to any branch on this repository, and may belong to a fork outside of the repository.
* docs(api.md): minor restructuring
* fix(tokens): incorrect caching of async tokenizer
* fix(deps): pin pydantic to less than v2.0
* fix(deps): update certifi to remove trustcor cert
---------
Co-authored-by: Stainless Bot <[email protected]>
- Loading branch information
1 parent
5d8a418
commit d617893
Showing
4 changed files
with
84 additions
and
9 deletions.
There are no files selected for viewing
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.
Oops, something went wrong.
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,76 @@ | ||
import asyncio | ||
import threading | ||
import concurrent.futures | ||
from multiprocessing import Pool | ||
|
||
import pytest | ||
|
||
from anthropic import _tokenizers | ||
|
||
|
||
@pytest.fixture(autouse=True)
def before_test() -> None:
    """Reset the module-level tokenizer cache so every test starts cold."""
    _tokenizers._tokenizer = None
|
||
|
||
def _sync_tokenizer_test() -> None:
    """Load the sync tokenizer and verify a known two-token encoding."""
    encoding = _tokenizers.sync_get_tokenizer().encode("hello world")  # type: ignore
    assert len(encoding.ids) == 2  # type: ignore
|
||
|
||
def test_threading() -> None:
    """Fetching the tokenizer from two racing threads must not fail."""
    errors = []

    def worker() -> None:
        try:
            _sync_tokenizer_test()
        except Exception as exc:
            # Record the failure for the main thread; re-raise so the
            # traceback still shows up in the thread's stderr output.
            errors.append(exc)
            raise

    threads = [threading.Thread(target=worker) for _ in range(2)]
    for thread in threads:
        thread.start()
    for thread in threads:
        thread.join()

    assert not errors
|
||
|
||
def test_concurrent_futures() -> None:
    """Fetching the tokenizer from a thread pool must not fail."""
    with concurrent.futures.ThreadPoolExecutor() as pool:
        pending = [pool.submit(_sync_tokenizer_test) for _ in range(2)]

    # The executor has joined its workers by now; .result() re-raises
    # any exception raised inside a worker.
    for future in pending:
        future.result()
|
||
|
||
def test_multiprocessing() -> None:
    """Fetching the tokenizer inside a spawned worker process must not fail."""
    with Pool(processes=10) as workers:
        # apply() blocks until the worker finishes and re-raises its errors.
        workers.apply(_sync_tokenizer_test)
|
||
|
||
async def _async_tokenizer_test() -> None:
    """Load the async tokenizer and verify a known two-token encoding."""
    encoding = (await _tokenizers.async_get_tokenizer()).encode("hello world")  # type: ignore
    assert len(encoding.ids) == 2  # type: ignore
|
||
|
||
async def test_asyncio_tasks() -> None:
    """Fetching the tokenizer from two concurrent asyncio tasks must not fail."""
    # `asyncio.get_event_loop()` is deprecated when called from inside a
    # coroutine (DeprecationWarning since Python 3.10); `get_running_loop()`
    # is the supported way to reach the loop that is driving this test.
    loop = asyncio.get_running_loop()

    task1 = loop.create_task(_async_tokenizer_test())
    task2 = loop.create_task(_async_tokenizer_test())

    await asyncio.gather(task1, task2)
|
||
|
||
async def test_asyncio_gather() -> None:
    """Two bare coroutines gathered together must both load the tokenizer."""
    coros = (_async_tokenizer_test(), _async_tokenizer_test())
    await asyncio.gather(*coros)