diff --git a/api.md b/api.md
index 83650cb0..a702cec8 100644
--- a/api.md
+++ b/api.md
@@ -1,8 +1,8 @@
-# Top Level
+# Anthropic

-Custom Methods:
+Methods:

-- `count_tokens`
+- client.count_tokens(\*args) -> int

# Completions

@@ -15,4 +15,3 @@ from anthropic.types import Completion
Methods:

- client.completions.create(\*\*params) -> Completion
-- client.completions.create(\*\*params) -> Completion
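For readers of the rendered api.md, a minimal usage sketch of the two documented methods; this assumes the `Anthropic` client and prompt constants exported by this version of the SDK, and the model name is illustrative:

```python
from anthropic import AI_PROMPT, HUMAN_PROMPT, Anthropic

client = Anthropic()

# count_tokens tokenizes locally and returns the token count
print(client.count_tokens("hello world"))  # 2, matching tests/test_tokenizer.py

# completions.create returns a Completion, as documented above
completion = client.completions.create(
    model="claude-instant-v1",  # illustrative model name
    prompt=f"{HUMAN_PROMPT} Say hello{AI_PROMPT}",
    max_tokens_to_sample=32,
)
print(completion.completion)
```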
diff --git a/poetry.lock b/poetry.lock
index ebc71022..880e2961 100644
--- a/poetry.lock
+++ b/poetry.lock
@@ -95,13 +95,13 @@ uvloop = ["uvloop (>=0.15.2)"]

[[package]]
name = "certifi"
-version = "2022.9.24"
+version = "2023.5.7"
description = "Python package for providing Mozilla's CA Bundle."
optional = false
python-versions = ">=3.6"
files = [
-    {file = "certifi-2022.9.24-py3-none-any.whl", hash = "sha256:90c1a32f1d68f940488354e36370f6cca89f0f106db09518524c88d6ed83f382"},
-    {file = "certifi-2022.9.24.tar.gz", hash = "sha256:0d9c601124e5a6ba9712dbc60d9c53c21e34f5f641fe83002317394311bdce14"},
+    {file = "certifi-2023.5.7-py3-none-any.whl", hash = "sha256:c6c2e98f5c7869efca1f8916fed228dd91539f9f1b444c314c06eef02980c716"},
+    {file = "certifi-2023.5.7.tar.gz", hash = "sha256:0f0d56dc5a6ad56fd4ba36484d6cc34451e1c6548c61daad8c320169f91eddc7"},
]

[[package]]
@@ -825,4 +825,4 @@ testing = ["flake8 (<5)", "func-timeout", "jaraco.functools", "jaraco.itertools"
[metadata]
lock-version = "2.0"
python-versions = "^3.7"
-content-hash = "d91f45ecd0210a0a992caef1795f664e5f846ba00ffa4f2dc88c08ba3ebbedb4"
+content-hash = "b09fa7774c59e804a189a287dc33a71974b592eb5398dfa34e428d0aba3fc503"
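Note that `content-hash` is Poetry's fingerprint of the dependency specification in pyproject.toml, so it changes here as a direct consequence of the `pydantic` constraint rewrite below; the certifi entry is an ordinary lockfile refresh of that package.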
diff --git a/pyproject.toml b/pyproject.toml
index a2e5a23a..fc3d7ee4 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -13,7 +13,7 @@ packages = [
[tool.poetry.dependencies]
python = "^3.7"
httpx = ">= 0.23.0"
-pydantic = ">= 1.9.0, < 2.0"
+pydantic = "^1.9.0"
typing-extensions = ">= 4.1.1"
anyio = ">= 3.5.0"
distro = ">= 1.7.0"
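In Poetry's constraint syntax the caret form `^1.9.0` expands to `>= 1.9.0, < 2.0.0`, so this rewrite keeps the same resolved range as the explicit bound it replaces: pydantic stays below the breaking 2.x series either way.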
diff --git a/tests/test_tokenizer.py b/tests/test_tokenizer.py
new file mode 100644
index 00000000..87be4044
--- /dev/null
+++ b/tests/test_tokenizer.py
@@ -0,0 +1,76 @@
+import asyncio
+import threading
+import concurrent.futures
+from multiprocessing import Pool
+
+import pytest
+
+from anthropic import _tokenizers
+
+
+@pytest.fixture(autouse=True)
+def before_test() -> None:
+    # clear cache
+    _tokenizers._tokenizer = None
+
+
+def _sync_tokenizer_test() -> None:
+    tokenizer = _tokenizers.sync_get_tokenizer()
+    encoded_text = tokenizer.encode("hello world")  # type: ignore
+    assert len(encoded_text.ids) == 2  # type: ignore
+
+
+def test_threading() -> None:
+    failed = False
+
+    def target() -> None:
+        nonlocal failed
+
+        try:
+            _sync_tokenizer_test()
+        except Exception:
+            failed = True
+            raise
+
+    t1 = threading.Thread(target=target)
+    t2 = threading.Thread(target=target)
+
+    t1.start()
+    t2.start()
+    t1.join()
+    t2.join()
+
+    assert not failed
+
+
+def test_concurrent_futures() -> None:
+    with concurrent.futures.ThreadPoolExecutor() as executor:
+        future1 = executor.submit(_sync_tokenizer_test)
+        future2 = executor.submit(_sync_tokenizer_test)
+
+        future1.result()
+        future2.result()
+
+
+def test_multiprocessing() -> None:
+    with Pool(processes=10) as pool:
+        pool.apply(_sync_tokenizer_test)
+
+
+async def _async_tokenizer_test() -> None:
+    tokenizer = await _tokenizers.async_get_tokenizer()
+    encoded_text = tokenizer.encode("hello world")  # type: ignore
+    assert len(encoded_text.ids) == 2  # type: ignore
+
+
+async def test_asyncio_tasks() -> None:
+    # create_task requires an already-running event loop, which the
+    # async test runner provides
+    task1 = asyncio.create_task(_async_tokenizer_test())
+    task2 = asyncio.create_task(_async_tokenizer_test())
+
+    await asyncio.gather(task1, task2)
+
+
+async def test_asyncio_gather() -> None:
+    await asyncio.gather(_async_tokenizer_test(), _async_tokenizer_test())
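These tests pin down the concurrency contract of `anthropic._tokenizers`: a single lazily-loaded tokenizer cached at module level, which is why the fixture resets `_tokenizers._tokenizer` to `None`. A hypothetical sketch of that caching pattern, assuming the HuggingFace `tokenizers` package and a bundled vocabulary file; the real module's loading details may differ:

```python
from typing import Optional

from anyio import to_thread
from tokenizers import Tokenizer  # assumption: `tokenizers` backs the encoder

# module-level cache; the test fixture resets this to None between tests
_tokenizer: Optional[Tokenizer] = None


def _load_tokenizer() -> Tokenizer:
    # assumption: the vocabulary ships with the package as tokenizer.json
    return Tokenizer.from_file("tokenizer.json")


def sync_get_tokenizer() -> Tokenizer:
    global _tokenizer
    if _tokenizer is None:
        # a race here at worst loads the tokenizer twice; every caller
        # still gets a working instance, which is all the tests require
        _tokenizer = _load_tokenizer()
    return _tokenizer


async def async_get_tokenizer() -> Tokenizer:
    global _tokenizer
    if _tokenizer is None:
        # load in a worker thread so the event loop is not blocked
        _tokenizer = await to_thread.run_sync(_load_tokenizer)
    return _tokenizer
```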