Skip to content

Commit

Permalink
v0.3.2 (#51)
Browse files Browse the repository at this point in the history
* docs(api.md): minor restructuring

* fix(tokens): incorrect caching of async tokenizer

* fix(deps): pin pydantic to less than v2.0

* fix(deps): update certifi to remove trustcor cert

---------

Co-authored-by: Stainless Bot <[email protected]>
  • Loading branch information
rattrayalex and stainless-bot authored Jul 1, 2023
1 parent 5d8a418 commit d617893
Show file tree
Hide file tree
Showing 4 changed files with 84 additions and 9 deletions.
7 changes: 3 additions & 4 deletions api.md
Original file line number Diff line number Diff line change
@@ -1,8 +1,8 @@
# Top Level
# Anthropic

Custom Methods:
Methods:

- `count_tokens`
- <code>client.<a href="./src/anthropic/_client.py">count_tokens</a>(\*args) -> int</code>

# Completions

Expand All @@ -15,4 +15,3 @@ from anthropic.types import Completion
Methods:

- <code title="post /v1/complete">client.completions.<a href="./src/anthropic/resources/completions.py">create</a>(\*\*<a href="src/anthropic/types/completion_create_params.py">params</a>) -> <a href="./src/anthropic/types/completion.py">Completion</a></code>
- <code title="post /v1/complete">client.completions.<a href="./src/anthropic/resources/completions.py">create</a>(\*\*<a href="src/anthropic/types/completion_create_params.py">params</a>) -> <a href="./src/anthropic/types/completion.py">Completion</a></code>
8 changes: 4 additions & 4 deletions poetry.lock

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.

2 changes: 1 addition & 1 deletion pyproject.toml
Original file line number Diff line number Diff line change
Expand Up @@ -13,7 +13,7 @@ packages = [
[tool.poetry.dependencies]
python = "^3.7"
httpx = ">= 0.23.0"
pydantic = ">= 1.9.0, < 2.0"
pydantic = "^1.9.0"
typing-extensions = ">= 4.1.1"
anyio = ">= 3.5.0"
distro = ">= 1.7.0"
Expand Down
76 changes: 76 additions & 0 deletions tests/test_tokenizer.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,76 @@
import asyncio
import threading
import concurrent.futures
from multiprocessing import Pool

import pytest

from anthropic import _tokenizers


@pytest.fixture(autouse=True)
def before_test() -> None:
    """Reset the module-level tokenizer cache so every test starts cold."""
    _tokenizers._tokenizer = None


def _sync_tokenizer_test() -> None:
    """Fetch the (cached) sync tokenizer and check it encodes a known phrase.

    "hello world" is expected to produce exactly two token ids.
    """
    tok = _tokenizers.sync_get_tokenizer()
    encoding = tok.encode("hello world")  # type: ignore
    assert len(encoding.ids) == 2  # type: ignore


def test_threading() -> None:
    """Run the sync tokenizer check from two threads at once.

    Both threads may race to initialise the cached tokenizer; neither
    may fail. Exceptions raised in worker threads would otherwise be
    swallowed by ``Thread``, so they are collected explicitly.
    """
    errors = []

    def worker() -> None:
        try:
            _sync_tokenizer_test()
        except Exception as err:
            errors.append(err)
            raise

    threads = [threading.Thread(target=worker) for _ in range(2)]
    for thread in threads:
        thread.start()
    for thread in threads:
        thread.join()

    assert not errors


def test_concurrent_futures() -> None:
    """The tokenizer must be safely shared by a thread pool.

    ``Future.result()`` re-raises any worker exception, so a failed
    encode fails the test.
    """
    with concurrent.futures.ThreadPoolExecutor() as executor:
        futures = [executor.submit(_sync_tokenizer_test) for _ in range(2)]
        for future in futures:
            future.result()


def test_multiprocessing() -> None:
    """Tokenizer loading must also work inside a worker process.

    ``Pool.apply`` blocks until the child finishes and re-raises any
    exception it hit.
    """
    with Pool(10) as pool:
        pool.apply(_sync_tokenizer_test)


async def _async_tokenizer_test() -> None:
    """Fetch the tokenizer via the async path and check a known encoding.

    "hello world" is expected to produce exactly two token ids.
    """
    tok = await _tokenizers.async_get_tokenizer()
    encoding = tok.encode("hello world")  # type: ignore
    assert len(encoding.ids) == 2  # type: ignore


async def test_asyncio_tasks() -> None:
    """Two concurrent tasks must both obtain a working tokenizer.

    Exercises the async cache path under task-level concurrency.
    """
    # get_event_loop() is deprecated when called from within a coroutine
    # (DeprecationWarning since 3.10); get_running_loop() — available since
    # Python 3.7, matching this project's floor — is the supported call.
    loop = asyncio.get_running_loop()

    task1 = loop.create_task(_async_tokenizer_test())
    task2 = loop.create_task(_async_tokenizer_test())

    await asyncio.gather(task1, task2)


async def test_asyncio_gather() -> None:
    """``gather`` on bare coroutines must also share the tokenizer safely."""
    coros = (_async_tokenizer_test(), _async_tokenizer_test())
    await asyncio.gather(*coros)

0 comments on commit d617893

Please sign in to comment.