diff --git a/.devcontainer/Dockerfile b/.devcontainer/Dockerfile new file mode 100644 index 00000000..6eb00725 --- /dev/null +++ b/.devcontainer/Dockerfile @@ -0,0 +1,9 @@ +ARG VARIANT="3.9" +FROM mcr.microsoft.com/vscode/devcontainers/python:0-${VARIANT} + +USER vscode + +RUN curl -sSf https://rye-up.com/get | RYE_VERSION="0.15.2" RYE_INSTALL_OPTION="--yes" bash +ENV PATH=/home/vscode/.rye/shims:$PATH + +RUN echo "[[ -d .venv ]] && source .venv/bin/activate" >> /home/vscode/.bashrc diff --git a/.devcontainer/devcontainer.json b/.devcontainer/devcontainer.json new file mode 100644 index 00000000..b9da964d --- /dev/null +++ b/.devcontainer/devcontainer.json @@ -0,0 +1,39 @@ +// For format details, see https://aka.ms/devcontainer.json. For config options, see the +// README at: https://github.com/devcontainers/templates/tree/main/src/debian +{ + "name": "Debian", + "build": { + "dockerfile": "Dockerfile", + "context": ".." + }, + + "postStartCommand": "rye sync --all-features", + + "customizations": { + "vscode": { + "extensions": [ + "ms-python.python" + ], + "settings": { + "terminal.integrated.shell.linux": "/bin/bash", + "python.pythonPath": ".venv/bin/python", + "python.typeChecking": "basic", + "terminal.integrated.env.linux": { + "PATH": "/home/vscode/.rye/shims:${env:PATH}" + } + } + } + } + + // Features to add to the dev container. More info: https://containers.dev/features. + // "features": {}, + + // Use 'forwardPorts' to make a list of ports inside the container available locally. + // "forwardPorts": [], + + // Configure tool-specific properties. + // "customizations": {}, + + // Uncomment to connect as root instead. More info: https://aka.ms/dev-containers-non-root. + // "remoteUser": "root" +} diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml new file mode 100644 index 00000000..18fcbaa3 --- /dev/null +++ b/.github/workflows/ci.yml @@ -0,0 +1,41 @@ +name: CI +on: + push: + branches: + - main + pull_request: + branches: + - main + +jobs: + lint: + name: lint + runs-on: ubuntu-latest + if: github.repository == 'anthropics/anthropic-sdk-python' + + steps: + - uses: actions/checkout@v3 + + - name: Install Rye + run: | + curl -sSf https://rye-up.com/get | bash + echo "$HOME/.rye/shims" >> $GITHUB_PATH + env: + RYE_VERSION: 0.15.2 + RYE_INSTALL_OPTION: "--yes" + + - name: Install dependencies + run: | + rye sync --all-features + + - name: Run ruff + run: | + rye run check:ruff + + - name: Run type checking + run: | + rye run typecheck + + - name: Ensure importable + run: | + rye run python -c 'import anthropic' diff --git a/.python-version b/.python-version new file mode 100644 index 00000000..43077b24 --- /dev/null +++ b/.python-version @@ -0,0 +1 @@ +3.9.18 diff --git a/.release-please-manifest.json b/.release-please-manifest.json index d04f223f..4208b5cb 100644 --- a/.release-please-manifest.json +++ b/.release-please-manifest.json @@ -1,3 +1,3 @@ { - ".": "0.5.1" + ".": "0.6.0" } \ No newline at end of file diff --git a/CHANGELOG.md b/CHANGELOG.md index 06ecd87a..c37fd707 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,5 +1,43 @@ # Changelog +## 0.6.0 (2023-11-08) + +Full Changelog: [v0.5.1...v0.6.0](https://github.com/anthropics/anthropic-sdk-python/compare/v0.5.1...v0.6.0) + +### Features + +* **client:** adjust retry behavior to be exponential backoff ([#205](https://github.com/anthropics/anthropic-sdk-python/issues/205)) ([c8a4119](https://github.com/anthropics/anthropic-sdk-python/commit/c8a4119661c8ff74c7efa308963c2f187728a46f)) +* **client:** allow 
binary returns ([#217](https://github.com/anthropics/anthropic-sdk-python/issues/217)) ([159ddd6](https://github.com/anthropics/anthropic-sdk-python/commit/159ddd69e6c438baf9abb1e518d0c2467c8f952c)) +* **client:** improve file upload types ([#204](https://github.com/anthropics/anthropic-sdk-python/issues/204)) ([d85d1e0](https://github.com/anthropics/anthropic-sdk-python/commit/d85d1e04e36a90d43d134992ff4a5b1589aa6e0a)) +* **client:** support accessing raw response objects ([#211](https://github.com/anthropics/anthropic-sdk-python/issues/211)) ([ebe8e4a](https://github.com/anthropics/anthropic-sdk-python/commit/ebe8e4a274f21d73cbc2fbb94fe56172f335cbd2)) +* **client:** support passing BaseModels to request params at runtime ([#218](https://github.com/anthropics/anthropic-sdk-python/issues/218)) ([9f04ea6](https://github.com/anthropics/anthropic-sdk-python/commit/9f04ea6cf4a68e2ce65e8e00448b4d3de18a8dec)) +* **client:** support passing chunk size for binary responses ([#227](https://github.com/anthropics/anthropic-sdk-python/issues/227)) ([c88f01e](https://github.com/anthropics/anthropic-sdk-python/commit/c88f01ed17b505e3e8a30c8a6adc9231e096b3e2)) +* **client:** support passing httpx.Timeout to method timeout argument ([#222](https://github.com/anthropics/anthropic-sdk-python/issues/222)) ([ef58166](https://github.com/anthropics/anthropic-sdk-python/commit/ef58166e0fac68256ca8154792d2157698ed6a9d)) +* **github:** include a devcontainer setup ([#216](https://github.com/anthropics/anthropic-sdk-python/issues/216)) ([c9fee19](https://github.com/anthropics/anthropic-sdk-python/commit/c9fee192863fa5f894035ce3e1cf52a78b56895d)) +* **package:** add classifiers ([#214](https://github.com/anthropics/anthropic-sdk-python/issues/214)) ([380967e](https://github.com/anthropics/anthropic-sdk-python/commit/380967e515279482e7a93570f172f52324f8aa26)) + + +### Bug Fixes + +* **binaries:** don't synchronously block in astream_to_file ([#219](https://github.com/anthropics/anthropic-sdk-python/issues/219)) ([2a2a617](https://github.com/anthropics/anthropic-sdk-python/commit/2a2a617d6862eb83b8a671acad08825c3a20d11b)) +* prevent TypeError in Python 3.8 (ABC is not subscriptable) ([#221](https://github.com/anthropics/anthropic-sdk-python/issues/221)) ([893e885](https://github.com/anthropics/anthropic-sdk-python/commit/893e885859b5fb94d7673bfa9ad0a04434fec196)) + + +### Chores + +* **docs:** fix github links ([#225](https://github.com/anthropics/anthropic-sdk-python/issues/225)) ([dfa9935](https://github.com/anthropics/anthropic-sdk-python/commit/dfa99352291b15b8c885eb558c8b738b26d33373)) +* **internal:** fix some typos ([#223](https://github.com/anthropics/anthropic-sdk-python/issues/223)) ([9038193](https://github.com/anthropics/anthropic-sdk-python/commit/9038193db52612f756194fd735aab899bed0931f)) +* **internal:** improve github devcontainer setup ([#226](https://github.com/anthropics/anthropic-sdk-python/issues/226)) ([3cd90ab](https://github.com/anthropics/anthropic-sdk-python/commit/3cd90abe2c57375438a4209e31253f758f408b17)) +* **internal:** minor restructuring of base client ([#213](https://github.com/anthropics/anthropic-sdk-python/issues/213)) ([60dc609](https://github.com/anthropics/anthropic-sdk-python/commit/60dc609aa9c4b01b88d9c7e8d1eb35bf9561f210)) +* **internal:** remove unused int/float conversion ([#220](https://github.com/anthropics/anthropic-sdk-python/issues/220)) ([a6bf20d](https://github.com/anthropics/anthropic-sdk-python/commit/a6bf20d8cb64f13618c3122f8285d240840884f8)) +* **internal:** 
require explicit overrides ([#210](https://github.com/anthropics/anthropic-sdk-python/issues/210)) ([72f4339](https://github.com/anthropics/anthropic-sdk-python/commit/72f4339749f144e75e0e7dc0a7b2bb26f728044e)) + + +### Documentation + +* fix github links ([#215](https://github.com/anthropics/anthropic-sdk-python/issues/215)) ([8cbed15](https://github.com/anthropics/anthropic-sdk-python/commit/8cbed150d6e8f6ac8de8962e169ca46cdd0643c5)) +* improve to dictionary example ([#207](https://github.com/anthropics/anthropic-sdk-python/issues/207)) ([5e32c20](https://github.com/anthropics/anthropic-sdk-python/commit/5e32c201f7017c2d4aa7416d1a7de3f0c5247fcc)) + ## 0.5.1 (2023-10-20) Full Changelog: [v0.5.0...v0.5.1](https://github.com/anthropics/anthropic-sdk-python/compare/v0.5.0...v0.5.1) diff --git a/README.md b/README.md index a7c45b0c..3d607542 100644 --- a/README.md +++ b/README.md @@ -6,7 +6,7 @@ The Anthropic Python library provides convenient access to the Anthropic REST AP application. It includes type definitions for all request params and response fields, and offers both synchronous and asynchronous clients powered by [httpx](https://github.com/encode/httpx). -For the AWS Bedrock API, see [`anthropic-bedrock`](github.com/anthropics/anthropic-bedrock-python). +For the AWS Bedrock API, see [`anthropic-bedrock`](https://github.com/anthropics/anthropic-bedrock-python). ## Migration from v0.2.x and below @@ -181,7 +181,7 @@ client.count_tokens('Hello world!') # 3 ## Using types -Nested request parameters are [TypedDicts](https://docs.python.org/3/library/typing.html#typing.TypedDict). Responses are [Pydantic models](https://docs.pydantic.dev), which provide helper methods for things like serializing back into JSON ([v1](https://docs.pydantic.dev/1.10/usage/models/), [v2](https://docs.pydantic.dev/latest/usage/serialization/)). To get a dictionary, call `dict(model)`. +Nested request parameters are [TypedDicts](https://docs.python.org/3/library/typing.html#typing.TypedDict). Responses are [Pydantic models](https://docs.pydantic.dev), which provide helper methods for things like serializing back into JSON ([v1](https://docs.pydantic.dev/1.10/usage/models/), [v2](https://docs.pydantic.dev/latest/usage/serialization/)). To get a dictionary, call `model.model_dump()`. Typed requests and responses provide autocomplete and documentation within your editor. If you would like to see type errors in VS Code to help catch bugs earlier, set `python.analysis.typeCheckingMode` to `basic`. @@ -325,6 +325,28 @@ if response.my_field is None: print('Got json like {"my_field": null}.') ``` +### Accessing raw response data (e.g. headers) + +The "raw" Response object can be accessed by prefixing `.with_raw_response.` to any HTTP method call. + +```py +from anthropic import Anthropic, HUMAN_PROMPT, AI_PROMPT + +anthropic = Anthropic() + +response = anthropic.completions.with_raw_response.create( + model="claude-2", + max_tokens_to_sample=300, + prompt=f"{HUMAN_PROMPT} how does a court case get to the Supreme Court?{AI_PROMPT}", +) +print(response.headers.get('X-My-Header')) + +completion = response.parse() # get the object that `completions.create()` would have returned +print(completion.completion) +``` + +These methods return an [`APIResponse`](https://github.com/anthropics/anthropic-sdk-python/tree/main/src/anthropic/_response.py) object. 
+ ### Configuring the HTTP client You can directly override the [httpx client](https://www.python-httpx.org/api/#client) to customize it for your use case, including: diff --git a/mypy.ini b/mypy.ini index 8ea2d5af..b57646b0 100644 --- a/mypy.ini +++ b/mypy.ini @@ -1,7 +1,11 @@ [mypy] pretty = True show_error_codes = True -exclude = _dev + +# Exclude _files.py because mypy isn't smart enough to apply +# the correct type narrowing and as this is an internal module +# it's fine to just use Pyright. +exclude = ^(src/anthropic/_files\.py|_dev/.*\.py)$ strict_equality = True implicit_reexport = True diff --git a/pyproject.toml b/pyproject.toml index b240a656..a46e7648 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,6 +1,6 @@ [project] name = "anthropic" -version = "0.5.1" +version = "0.6.0" description = "Client library for the anthropic API" readme = "README.md" license = "MIT" @@ -16,6 +16,22 @@ dependencies = [ "tokenizers >= 0.13.0" ] requires-python = ">= 3.7" +classifiers = [ + "Typing :: Typed", + "Intended Audience :: Developers", + "Programming Language :: Python :: 3.7", + "Programming Language :: Python :: 3.8", + "Programming Language :: Python :: 3.9", + "Programming Language :: Python :: 3.10", + "Programming Language :: Python :: 3.11", + "Programming Language :: Python :: 3.12", + "Operating System :: OS Independent", + "Operating System :: POSIX", + "Operating System :: MacOS", + "Operating System :: POSIX :: Linux", + "Operating System :: Microsoft :: Windows", + "Topic :: Software Development :: Libraries :: Python Modules", +] @@ -38,7 +54,8 @@ dev-dependencies = [ "isort==5.10.1", "time-machine==2.9.0", "nox==2023.4.22", - + "dirty-equals>=0.6.0", + ] [tool.rye.scripts] @@ -53,6 +70,16 @@ format = { chain = [ "format:ruff" = "ruff --fix ." "format:isort" = "isort ." +"check:ruff" = "ruff ." + +typecheck = { chain = [ + "typecheck:pyright", + "typecheck:mypy" +]} +"typecheck:pyright" = "pyright" +"typecheck:verify-types" = "pyright --verifytypes anthropic --ignoreexternal" +"typecheck:mypy" = "mypy --enable-incomplete-feature=Unpack ." + [build-system] requires = ["hatchling"] build-backend = "hatchling.build" @@ -90,6 +117,9 @@ exclude = [ ".venv", ".nox", ] + +reportImplicitOverride = true + reportImportCycles = false reportPrivateUsage = false diff --git a/requirements-dev.lock b/requirements-dev.lock index 6560d3c8..b2444b0a 100644 --- a/requirements-dev.lock +++ b/requirements-dev.lock @@ -13,14 +13,15 @@ argcomplete==3.1.2 attrs==23.1.0 black==23.3.0 certifi==2023.7.22 -charset-normalizer==3.3.0 +charset-normalizer==3.3.1 click==8.1.7 colorlog==6.7.0 +dirty-equals==0.6.0 distlib==0.3.7 distro==1.8.0 exceptiongroup==1.1.3 filelock==3.12.4 -fsspec==2023.9.2 +fsspec==2023.10.0 h11==0.12.0 httpcore==0.15.0 httpx==0.23.0 @@ -43,6 +44,7 @@ pyright==1.1.332 pytest==7.1.1 pytest-asyncio==0.21.1 python-dateutil==2.8.2 +pytz==2023.3.post1 pyyaml==6.0.1 requests==2.31.0 respx==0.19.2 diff --git a/src/anthropic/_base_client.py b/src/anthropic/_base_client.py index 11f23345..b2fe2426 100644 --- a/src/anthropic/_base_client.py +++ b/src/anthropic/_base_client.py @@ -1,5 +1,6 @@ from __future__ import annotations +import os import json import time import uuid @@ -29,7 +30,7 @@ overload, ) from functools import lru_cache -from typing_extensions import Literal, get_args, get_origin +from typing_extensions import Literal, override import anyio import httpx @@ -40,6 +41,7 @@ from . 
import _exceptions from ._qs import Querystring +from ._files import to_httpx_files, async_to_httpx_files from ._types import ( NOT_GIVEN, Body, @@ -48,34 +50,31 @@ ModelT, Headers, Timeout, - NoneType, NotGiven, ResponseT, Transport, AnyMapping, + PostParser, ProxiesTypes, RequestFiles, AsyncTransport, RequestOptions, UnknownResponse, ModelBuilderProtocol, + BinaryResponseContent, ) from ._utils import is_dict, is_given, is_mapping from ._compat import model_copy, model_dump -from ._models import ( - BaseModel, - GenericModel, - FinalRequestOptions, - validate_type, - construct_type, +from ._models import GenericModel, FinalRequestOptions, validate_type, construct_type +from ._response import APIResponse +from ._constants import ( + DEFAULT_LIMITS, + DEFAULT_TIMEOUT, + DEFAULT_MAX_RETRIES, + RAW_RESPONSE_HEADER, ) from ._streaming import Stream, AsyncStream -from ._exceptions import ( - APIStatusError, - APITimeoutError, - APIConnectionError, - APIResponseValidationError, -) +from ._exceptions import APIStatusError, APITimeoutError, APIConnectionError log: logging.Logger = logging.getLogger(__name__) @@ -100,19 +99,6 @@ HTTPX_DEFAULT_TIMEOUT = Timeout(5.0) -# default timeout is 10 minutes -DEFAULT_TIMEOUT = Timeout(timeout=600.0, connect=5.0) -DEFAULT_MAX_RETRIES = 2 -DEFAULT_LIMITS = Limits(max_connections=100, max_keepalive_connections=20) - - -class MissingStreamClassError(TypeError): - def __init__(self) -> None: - super().__init__( - "The `stream` argument was set to `True` but the `stream_cls` argument was not given. See `anthropic._streaming` for reference", - ) - - class PageInfo: """Stores the necesary information to build the request to retrieve the next page. @@ -157,7 +143,7 @@ class BasePage(GenericModel, Generic[ModelT]): Methods: has_next_page(): Check if there is another page available - next_page_info(): Get the necesary information to make a request for the next page + next_page_info(): Get the necessary information to make a request for the next page """ _options: FinalRequestOptions = PrivateAttr() @@ -181,6 +167,7 @@ def _params_from_url(self, url: URL) -> httpx.QueryParams: def _info_to_options(self, info: PageInfo) -> FinalRequestOptions: options = model_copy(self._options) + options._strip_raw_response_header() if not isinstance(info.params, NotGiven): options.params = {**options.params, **info.params} @@ -259,13 +246,17 @@ def __await__(self) -> Generator[Any, None, AsyncPageT]: return self._get_page().__await__() async def _get_page(self) -> AsyncPageT: - page = await self._client.request(self._page_cls, self._options) - page._set_private_attributes( # pyright: ignore[reportPrivateUsage] - model=self._model, - options=self._options, - client=self._client, - ) - return page + def _parser(resp: AsyncPageT) -> AsyncPageT: + resp._set_private_attributes( + model=self._model, + options=self._options, + client=self._client, + ) + return resp + + self._options.post_parser = _parser + + return await self._client.request(self._page_cls, self._options) async def __aiter__(self) -> AsyncIterator[ModelT]: # https://github.com/microsoft/pyright/issues/3464 @@ -316,9 +307,10 @@ async def get_next_page(self: AsyncPageT) -> AsyncPageT: _HttpxClientT = TypeVar("_HttpxClientT", bound=Union[httpx.Client, httpx.AsyncClient]) +_DefaultStreamT = TypeVar("_DefaultStreamT", bound=Union[Stream[Any], AsyncStream[Any]]) -class BaseClient(Generic[_HttpxClientT]): +class BaseClient(Generic[_HttpxClientT, _DefaultStreamT]): _client: _HttpxClientT _version: str _base_url: URL @@ -329,6 +321,7 
@@ class BaseClient(Generic[_HttpxClientT]): _transport: Transport | AsyncTransport | None _strict_response_validation: bool _idempotency_header: str | None + _default_stream_cls: type[_DefaultStreamT] | None = None def __init__( self, @@ -408,18 +401,6 @@ def _build_headers(self, options: FinalRequestOptions) -> httpx.Headers: return headers - def _prepare_request( - self, - request: httpx.Request, # noqa: ARG002 - ) -> None: - """This method is used as a callback for mutating the `Request` object - after it has been constructed. - - This is useful for cases where you want to add certain headers based off of - the request properties, e.g. `url`, `method` etc. - """ - return None - def _prepare_url(self, url: str) -> URL: """ Merge a URL argument together with any 'base_url' on the client, @@ -472,7 +453,7 @@ def _build_request( kwargs["data"] = self._serialize_multipartform(json_data) # TODO: report this error to httpx - request = self._client.build_request( # pyright: ignore[reportUnknownMemberType] + return self._client.build_request( # pyright: ignore[reportUnknownMemberType] headers=headers, timeout=self.timeout if isinstance(options.timeout, NotGiven) else options.timeout, method=options.method, @@ -486,8 +467,6 @@ def _build_request( files=options.files, **kwargs, ) - self._prepare_request(request) - return request def _serialize_multipartform(self, data: Mapping[object, object]) -> dict[str, object]: items = self.qs.stringify_items( @@ -503,80 +482,28 @@ def _serialize_multipartform(self, data: Mapping[object, object]) -> dict[str, o serialized[key] = value return serialized - def _extract_stream_chunk_type(self, stream_cls: type) -> type: - args = get_args(stream_cls) - if not args: - raise TypeError( - f"Expected stream_cls to have been given a generic type argument, e.g. Stream[Foo] but received {stream_cls}", - ) - return cast(type, args[0]) - def _process_response( self, *, cast_to: Type[ResponseT], - options: FinalRequestOptions, # noqa: ARG002 + options: FinalRequestOptions, response: httpx.Response, + stream: bool, + stream_cls: type[Stream[Any]] | type[AsyncStream[Any]] | None, ) -> ResponseT: - if cast_to is NoneType: - return cast(ResponseT, None) - - if cast_to == str: - return cast(ResponseT, response.text) - - origin = get_origin(cast_to) or cast_to - - if inspect.isclass(origin) and issubclass(origin, httpx.Response): - # Because of the invariance of our ResponseT TypeVar, users can subclass httpx.Response - # and pass that class to our request functions. We cannot change the variance to be either - # covariant or contravariant as that makes our usage of ResponseT illegal. We could construct - # the response class ourselves but that is something that should be supported directly in httpx - # as it would be easy to incorrectly construct the Response object due to the multitude of arguments. - if cast_to != httpx.Response: - raise ValueError(f"Subclasses of httpx.Response cannot be passed to `cast_to`") - return cast(ResponseT, response) - - # The check here is necessary as we are subverting the the type system - # with casts as the relationship between TypeVars and Types are very strict - # which means we must return *exactly* what was input or transform it in a - # way that retains the TypeVar state. As we cannot do that in this function - # then we have to resort to using `cast`. 
At the time of writing, we know this - # to be safe as we have handled all the types that could be bound to the - # `ResponseT` TypeVar, however if that TypeVar is ever updated in the future, then - # this function would become unsafe but a type checker would not report an error. - if ( - cast_to is not UnknownResponse - and not origin is list - and not origin is dict - and not origin is Union - and not issubclass(origin, BaseModel) - ): - raise RuntimeError( - f"Invalid state, expected {cast_to} to be a subclass type of {BaseModel}, {dict}, {list} or {Union}." - ) - - # split is required to handle cases where additional information is included - # in the response, e.g. application/json; charset=utf-8 - content_type, *_ = response.headers.get("content-type").split(";") - if content_type != "application/json": - if self._strict_response_validation: - raise APIResponseValidationError( - response=response, - message=f"Expected Content-Type response header to be `application/json` but received `{content_type}` instead.", - body=response.text, - ) - - # If the API responds with content that isn't JSON then we just return - # the (decoded) text without performing any parsing so that you can still - # handle the response however you need to. - return response.text # type: ignore + api_response = APIResponse( + raw=response, + client=self, + cast_to=cast_to, + stream=stream, + stream_cls=stream_cls, + options=options, + ) - data = response.json() + if response.request.headers.get(RAW_RESPONSE_HEADER) == "true": + return cast(ResponseT, api_response) - try: - return self._process_response_data(data=data, cast_to=cast_to, response=response) - except pydantic.ValidationError as err: - raise APIResponseValidationError(response=response, body=data) from err + return api_response.parse() def _process_response_data( self, @@ -690,15 +617,15 @@ def _calculate_retry_timeout( return retry_after initial_retry_delay = 0.5 - max_retry_delay = 2.0 + max_retry_delay = 8.0 nb_retries = max_retries - remaining_retries # Apply exponential backoff, but not more than the max. - sleep_seconds = min(initial_retry_delay * pow(nb_retries - 1, 2), max_retry_delay) + sleep_seconds = min(initial_retry_delay * pow(2.0, nb_retries), max_retry_delay) # Apply some jitter, plus-or-minus half a second. - jitter = random() - 0.5 - timeout = sleep_seconds + jitter + jitter = 1 - 0.25 * random() + timeout = sleep_seconds * jitter return timeout if timeout >= 0 else 0 def _should_retry(self, response: httpx.Response) -> bool: @@ -733,7 +660,7 @@ def _idempotency_key(self) -> str: return f"stainless-python-retry-{uuid.uuid4()}" -class SyncAPIClient(BaseClient[httpx.Client]): +class SyncAPIClient(BaseClient[httpx.Client, Stream[Any]]): _client: httpx.Client _has_custom_http_client: bool _default_stream_cls: type[Stream[Any]] | None = None @@ -842,6 +769,24 @@ def __exit__( ) -> None: self.close() + def _prepare_options( + self, + options: FinalRequestOptions, # noqa: ARG002 + ) -> None: + """Hook for mutating the given options""" + return None + + def _prepare_request( + self, + request: httpx.Request, # noqa: ARG002 + ) -> None: + """This method is used as a callback for mutating the `Request` object + after it has been constructed. + This is useful for cases where you want to add certain headers based off of + the request properties, e.g. `url`, `method` etc. 
+ """ + return None + @overload def request( self, @@ -903,8 +848,11 @@ def _request( stream: bool, stream_cls: type[_StreamT] | None, ) -> ResponseT | _StreamT: + self._prepare_options(options) + retries = self._remaining_retries(remaining_retries, options) request = self._build_request(options) + self._prepare_request(request) try: response = self._client.send(request, auth=self.custom_auth, stream=stream) @@ -929,23 +877,32 @@ def _request( raise self._make_status_error_from_response(err.response) from None except httpx.TimeoutException as err: if retries > 0: - return self._retry_request(options, cast_to, retries, stream=stream, stream_cls=stream_cls) + return self._retry_request( + options, + cast_to, + retries, + stream=stream, + stream_cls=stream_cls, + ) raise APITimeoutError(request=request) from err except Exception as err: if retries > 0: - return self._retry_request(options, cast_to, retries, stream=stream, stream_cls=stream_cls) + return self._retry_request( + options, + cast_to, + retries, + stream=stream, + stream_cls=stream_cls, + ) raise APIConnectionError(request=request) from err - if stream: - if stream_cls: - return stream_cls(cast_to=self._extract_stream_chunk_type(stream_cls), response=response, client=self) - - stream_cls = cast("type[_StreamT] | None", self._default_stream_cls) - if stream_cls is None: - raise MissingStreamClassError() - return stream_cls(cast_to=cast_to, response=response, client=self) - - return self._process_response(cast_to=cast_to, options=options, response=response) + return self._process_response( + cast_to=cast_to, + options=options, + response=response, + stream=stream, + stream_cls=stream_cls, + ) def _retry_request( self, @@ -979,13 +936,17 @@ def _request_api_list( page: Type[SyncPageT], options: FinalRequestOptions, ) -> SyncPageT: - resp = self.request(page, options, stream=False) - resp._set_private_attributes( # pyright: ignore[reportPrivateUsage] - client=self, - model=model, - options=options, - ) - return resp + def _parser(resp: SyncPageT) -> SyncPageT: + resp._set_private_attributes( + client=self, + model=model, + options=options, + ) + return resp + + options.post_parser = _parser + + return self.request(page, options, stream=False) @overload def get( @@ -1088,7 +1049,9 @@ def post( stream: bool = False, stream_cls: type[_StreamT] | None = None, ) -> ResponseT | _StreamT: - opts = FinalRequestOptions.construct(method="post", url=path, json_data=body, files=files, **options) + opts = FinalRequestOptions.construct( + method="post", url=path, json_data=body, files=to_httpx_files(files), **options + ) return cast(ResponseT, self.request(cast_to, opts, stream=stream, stream_cls=stream_cls)) def patch( @@ -1111,7 +1074,9 @@ def put( files: RequestFiles | None = None, options: RequestOptions = {}, ) -> ResponseT: - opts = FinalRequestOptions.construct(method="put", url=path, json_data=body, files=files, **options) + opts = FinalRequestOptions.construct( + method="put", url=path, json_data=body, files=to_httpx_files(files), **options + ) return self.request(cast_to, opts) def delete( @@ -1139,7 +1104,7 @@ def get_api_list( return self._request_api_list(model, page, opts) -class AsyncAPIClient(BaseClient[httpx.AsyncClient]): +class AsyncAPIClient(BaseClient[httpx.AsyncClient, AsyncStream[Any]]): _client: httpx.AsyncClient _has_custom_http_client: bool _default_stream_cls: type[AsyncStream[Any]] | None = None @@ -1245,6 +1210,24 @@ async def __aexit__( ) -> None: await self.close() + async def _prepare_options( + self, + options: 
FinalRequestOptions, # noqa: ARG002 + ) -> None: + """Hook for mutating the given options""" + return None + + async def _prepare_request( + self, + request: httpx.Request, # noqa: ARG002 + ) -> None: + """This method is used as a callback for mutating the `Request` object + after it has been constructed. + This is useful for cases where you want to add certain headers based off of + the request properties, e.g. `url`, `method` etc. + """ + return None + @overload async def request( self, @@ -1306,8 +1289,11 @@ async def _request( stream_cls: type[_AsyncStreamT] | None, remaining_retries: int | None, ) -> ResponseT | _AsyncStreamT: + await self._prepare_options(options) + retries = self._remaining_retries(remaining_retries, options) request = self._build_request(options) + await self._prepare_request(request) try: response = await self._client.send(request, auth=self.custom_auth, stream=stream) @@ -1349,16 +1335,13 @@ async def _request( return await self._retry_request(options, cast_to, retries, stream=stream, stream_cls=stream_cls) raise APIConnectionError(request=request) from err - if stream: - if stream_cls: - return stream_cls(cast_to=self._extract_stream_chunk_type(stream_cls), response=response, client=self) - - stream_cls = cast("type[_AsyncStreamT] | None", self._default_stream_cls) - if stream_cls is None: - raise MissingStreamClassError() - return stream_cls(cast_to=cast_to, response=response, client=self) - - return self._process_response(cast_to=cast_to, options=options, response=response) + return self._process_response( + cast_to=cast_to, + options=options, + response=response, + stream=stream, + stream_cls=stream_cls, + ) async def _retry_request( self, @@ -1491,7 +1474,9 @@ async def post( stream: bool = False, stream_cls: type[_AsyncStreamT] | None = None, ) -> ResponseT | _AsyncStreamT: - opts = FinalRequestOptions.construct(method="post", url=path, json_data=body, files=files, **options) + opts = FinalRequestOptions.construct( + method="post", url=path, json_data=body, files=await async_to_httpx_files(files), **options + ) return await self.request(cast_to, opts, stream=stream, stream_cls=stream_cls) async def patch( @@ -1514,7 +1499,9 @@ async def put( files: RequestFiles | None = None, options: RequestOptions = {}, ) -> ResponseT: - opts = FinalRequestOptions.construct(method="put", url=path, json_data=body, files=files, **options) + opts = FinalRequestOptions.construct( + method="put", url=path, json_data=body, files=await async_to_httpx_files(files), **options + ) return await self.request(cast_to, opts) async def delete( @@ -1550,7 +1537,8 @@ def make_request_options( extra_query: Query | None = None, extra_body: Body | None = None, idempotency_key: str | None = None, - timeout: float | None | NotGiven = NOT_GIVEN, + timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN, + post_parser: PostParser | NotGiven = NOT_GIVEN, ) -> RequestOptions: """Create a dict of type RequestOptions without keys of NotGiven values.""" options: RequestOptions = {} @@ -1572,6 +1560,10 @@ def make_request_options( if idempotency_key is not None: options["idempotency_key"] = idempotency_key + if is_given(post_parser): + # internal + options["post_parser"] = post_parser # type: ignore + return options @@ -1579,6 +1571,7 @@ class OtherPlatform: def __init__(self, name: str) -> None: self.name = name + @override def __str__(self) -> str: return f"Other:{self.name}" @@ -1640,6 +1633,7 @@ class OtherArch: def __init__(self, name: str) -> None: self.name = name + @override def 
__str__(self) -> str: return f"other:{self.name}" @@ -1680,3 +1674,105 @@ def _merge_mappings( """ merged = {**obj1, **obj2} return {key: value for key, value in merged.items() if not isinstance(value, Omit)} + + +class HttpxBinaryResponseContent(BinaryResponseContent): + response: httpx.Response + + def __init__(self, response: httpx.Response) -> None: + self.response = response + + @property + @override + def content(self) -> bytes: + return self.response.content + + @property + @override + def text(self) -> str: + return self.response.text + + @property + @override + def encoding(self) -> Optional[str]: + return self.response.encoding + + @property + @override + def charset_encoding(self) -> Optional[str]: + return self.response.charset_encoding + + @override + def json(self, **kwargs: Any) -> Any: + return self.response.json(**kwargs) + + @override + def read(self) -> bytes: + return self.response.read() + + @override + def iter_bytes(self, chunk_size: Optional[int] = None) -> Iterator[bytes]: + return self.response.iter_bytes(chunk_size) + + @override + def iter_text(self, chunk_size: Optional[int] = None) -> Iterator[str]: + return self.response.iter_text(chunk_size) + + @override + def iter_lines(self) -> Iterator[str]: + return self.response.iter_lines() + + @override + def iter_raw(self, chunk_size: Optional[int] = None) -> Iterator[bytes]: + return self.response.iter_raw(chunk_size) + + @override + def stream_to_file( + self, + file: str | os.PathLike[str], + *, + chunk_size: int | None = None, + ) -> None: + with open(file, mode="wb") as f: + for data in self.response.iter_bytes(chunk_size): + f.write(data) + + @override + def close(self) -> None: + return self.response.close() + + @override + async def aread(self) -> bytes: + return await self.response.aread() + + @override + async def aiter_bytes(self, chunk_size: Optional[int] = None) -> AsyncIterator[bytes]: + return self.response.aiter_bytes(chunk_size) + + @override + async def aiter_text(self, chunk_size: Optional[int] = None) -> AsyncIterator[str]: + return self.response.aiter_text(chunk_size) + + @override + async def aiter_lines(self) -> AsyncIterator[str]: + return self.response.aiter_lines() + + @override + async def aiter_raw(self, chunk_size: Optional[int] = None) -> AsyncIterator[bytes]: + return self.response.aiter_raw(chunk_size) + + @override + async def astream_to_file( + self, + file: str | os.PathLike[str], + *, + chunk_size: int | None = None, + ) -> None: + path = anyio.Path(file) + async with await path.open(mode="wb") as f: + async for data in self.response.aiter_bytes(chunk_size): + await f.write(data) + + @override + async def aclose(self) -> None: + return await self.response.aclose() diff --git a/src/anthropic/_client.py b/src/anthropic/_client.py index 07b32a97..62f80393 100644 --- a/src/anthropic/_client.py +++ b/src/anthropic/_client.py @@ -5,6 +5,7 @@ import os import asyncio from typing import Union, Mapping +from typing_extensions import override import httpx from tokenizers import Tokenizer # type: ignore[import] @@ -50,6 +51,7 @@ class Anthropic(SyncAPIClient): completions: resources.Completions + with_raw_response: AnthropicWithRawResponse # client options api_key: str | None @@ -121,12 +123,15 @@ def __init__( self._default_stream_cls = Stream self.completions = resources.Completions(self) + self.with_raw_response = AnthropicWithRawResponse(self) @property + @override def qs(self) -> Querystring: return Querystring(array_format="comma") @property + @override def auth_headers(self) -> 
dict[str, str]: if self._api_key_auth: return self._api_key_auth @@ -149,6 +154,7 @@ def _bearer_auth(self) -> dict[str, str]: return {"Authorization": f"Bearer {auth_token}"} @property + @override def default_headers(self) -> dict[str, str | Omit]: return { **super().default_headers, @@ -156,6 +162,7 @@ def default_headers(self) -> dict[str, str | Omit]: **self._custom_headers, } + @override def _validate_headers(self, headers: Headers, custom_headers: Headers) -> None: if self.api_key and headers.get("X-Api-Key"): return @@ -267,6 +274,7 @@ def count_tokens( def get_tokenizer(self) -> Tokenizer: return sync_get_tokenizer() + @override def _make_status_error( self, err_msg: str, @@ -302,6 +310,7 @@ def _make_status_error( class AsyncAnthropic(AsyncAPIClient): completions: resources.AsyncCompletions + with_raw_response: AsyncAnthropicWithRawResponse # client options api_key: str | None @@ -373,12 +382,15 @@ def __init__( self._default_stream_cls = AsyncStream self.completions = resources.AsyncCompletions(self) + self.with_raw_response = AsyncAnthropicWithRawResponse(self) @property + @override def qs(self) -> Querystring: return Querystring(array_format="comma") @property + @override def auth_headers(self) -> dict[str, str]: if self._api_key_auth: return self._api_key_auth @@ -401,6 +413,7 @@ def _bearer_auth(self) -> dict[str, str]: return {"Authorization": f"Bearer {auth_token}"} @property + @override def default_headers(self) -> dict[str, str | Omit]: return { **super().default_headers, @@ -408,6 +421,7 @@ def default_headers(self) -> dict[str, str | Omit]: **self._custom_headers, } + @override def _validate_headers(self, headers: Headers, custom_headers: Headers) -> None: if self.api_key and headers.get("X-Api-Key"): return @@ -522,6 +536,7 @@ async def count_tokens( async def get_tokenizer(self) -> Tokenizer: return await async_get_tokenizer() + @override def _make_status_error( self, err_msg: str, @@ -555,6 +570,16 @@ def _make_status_error( return APIStatusError(err_msg, response=response, body=body) +class AnthropicWithRawResponse: + def __init__(self, client: Anthropic) -> None: + self.completions = resources.CompletionsWithRawResponse(client.completions) + + +class AsyncAnthropicWithRawResponse: + def __init__(self, client: AsyncAnthropic) -> None: + self.completions = resources.AsyncCompletionsWithRawResponse(client.completions) + + Client = Anthropic AsyncClient = AsyncAnthropic diff --git a/src/anthropic/_constants.py b/src/anthropic/_constants.py index 1ed9dc5b..7343a7a4 100644 --- a/src/anthropic/_constants.py +++ b/src/anthropic/_constants.py @@ -1,5 +1,14 @@ # File generated from our OpenAPI spec by Stainless. 
+import httpx + +RAW_RESPONSE_HEADER = "X-Stainless-Raw-Response" + +# default timeout is 10 minutes +DEFAULT_TIMEOUT = httpx.Timeout(timeout=600.0, connect=5.0) +DEFAULT_MAX_RETRIES = 2 +DEFAULT_LIMITS = httpx.Limits(max_connections=100, max_keepalive_connections=20) + HUMAN_PROMPT = "\n\nHuman:" AI_PROMPT = "\n\nAssistant:" diff --git a/src/anthropic/_files.py b/src/anthropic/_files.py new file mode 100644 index 00000000..b6e8af8b --- /dev/null +++ b/src/anthropic/_files.py @@ -0,0 +1,122 @@ +from __future__ import annotations + +import io +import os +import pathlib +from typing import overload +from typing_extensions import TypeGuard + +import anyio + +from ._types import ( + FileTypes, + FileContent, + RequestFiles, + HttpxFileTypes, + HttpxFileContent, + HttpxRequestFiles, +) +from ._utils import is_tuple_t, is_mapping_t, is_sequence_t + + +def is_file_content(obj: object) -> TypeGuard[FileContent]: + return ( + isinstance(obj, bytes) or isinstance(obj, tuple) or isinstance(obj, io.IOBase) or isinstance(obj, os.PathLike) + ) + + +def assert_is_file_content(obj: object, *, key: str | None = None) -> None: + if not is_file_content(obj): + prefix = f"Expected entry at `{key}`" if key is not None else f"Expected file input `{obj!r}`" + raise RuntimeError( + f"{prefix} to be bytes, an io.IOBase instance, PathLike or a tuple but received {type(obj)} instead." + ) from None + + +@overload +def to_httpx_files(files: None) -> None: + ... + + +@overload +def to_httpx_files(files: RequestFiles) -> HttpxRequestFiles: + ... + + +def to_httpx_files(files: RequestFiles | None) -> HttpxRequestFiles | None: + if files is None: + return None + + if is_mapping_t(files): + files = {key: _transform_file(file) for key, file in files.items()} + elif is_sequence_t(files): + files = [(key, _transform_file(file)) for key, file in files] + else: + raise TypeError(f"Unexpected file type input {type(files)}, expected mapping or sequence") + + return files + + +def _transform_file(file: FileTypes) -> HttpxFileTypes: + if is_file_content(file): + if isinstance(file, os.PathLike): + path = pathlib.Path(file) + return (path.name, path.read_bytes()) + + return file + + if is_tuple_t(file): + return (file[0], _read_file_content(file[1]), *file[2:]) + + raise TypeError(f"Expected file types input to be a FileContent type or to be a tuple") + + +def _read_file_content(file: FileContent) -> HttpxFileContent: + if isinstance(file, os.PathLike): + return pathlib.Path(file).read_bytes() + return file + + +@overload +async def async_to_httpx_files(files: None) -> None: + ... + + +@overload +async def async_to_httpx_files(files: RequestFiles) -> HttpxRequestFiles: + ... 
+ + +async def async_to_httpx_files(files: RequestFiles | None) -> HttpxRequestFiles | None: + if files is None: + return None + + if is_mapping_t(files): + files = {key: await _async_transform_file(file) for key, file in files.items()} + elif is_sequence_t(files): + files = [(key, await _async_transform_file(file)) for key, file in files] + else: + raise TypeError("Unexpected file type input {type(files)}, expected mapping or sequence") + + return files + + +async def _async_transform_file(file: FileTypes) -> HttpxFileTypes: + if is_file_content(file): + if isinstance(file, os.PathLike): + path = anyio.Path(file) + return (path.name, await path.read_bytes()) + + return file + + if is_tuple_t(file): + return (file[0], await _async_read_file_content(file[1]), *file[2:]) + + raise TypeError(f"Expected file types input to be a FileContent type or to be a tuple") + + +async def _async_read_file_content(file: FileContent) -> HttpxFileContent: + if isinstance(file, os.PathLike): + return await anyio.Path(file).read_bytes() + + return file diff --git a/src/anthropic/_models.py b/src/anthropic/_models.py index f663155c..00d787ca 100644 --- a/src/anthropic/_models.py +++ b/src/anthropic/_models.py @@ -1,9 +1,19 @@ from __future__ import annotations import inspect -from typing import TYPE_CHECKING, Any, Type, Union, Generic, TypeVar, cast +from typing import TYPE_CHECKING, Any, Type, Union, Generic, TypeVar, Callable, cast from datetime import date, datetime -from typing_extensions import Literal, ClassVar, Protocol, final, runtime_checkable +from typing_extensions import ( + Unpack, + Literal, + ClassVar, + Protocol, + Required, + TypedDict, + final, + override, + runtime_checkable, +) import pydantic import pydantic.generics @@ -18,9 +28,16 @@ Timeout, NotGiven, AnyMapping, - RequestFiles, + HttpxRequestFiles, +) +from ._utils import ( + is_list, + is_given, + is_mapping, + parse_date, + parse_datetime, + strip_not_given, ) -from ._utils import is_list, is_mapping, parse_date, parse_datetime, strip_not_given from ._compat import PYDANTIC_V2, ConfigDict from ._compat import GenericModel as BaseGenericModel from ._compat import ( @@ -33,6 +50,7 @@ get_model_fields, field_get_default, ) +from ._constants import RAW_RESPONSE_HEADER __all__ = ["BaseModel", "GenericModel"] @@ -50,6 +68,7 @@ class BaseModel(pydantic.BaseModel): else: @property + @override def model_fields_set(self) -> set[str]: # a forwards-compat shim for pydantic v2 return self.__fields_set__ # type: ignore @@ -57,6 +76,7 @@ def model_fields_set(self) -> set[str]: class Config(pydantic.BaseConfig): # pyright: ignore[reportDeprecated] extra: Any = pydantic.Extra.allow # type: ignore + @override def __str__(self) -> str: # mypy complains about an invalid self arg return f'{self.__repr_name__()}({self.__repr_str__(", ")})' # type: ignore[misc] @@ -64,6 +84,7 @@ def __str__(self) -> str: # Override the 'construct' method in a way that supports recursive parsing without validation. # Based on https://github.com/samuelcolvin/pydantic/issues/1168#issuecomment-817742836. 
@classmethod + @override def construct( cls: Type[ModelT], _fields_set: set[str] | None = None, @@ -130,6 +151,7 @@ def construct( # a specifc pydantic version as some users may not know which # pydantic version they are currently using + @override def model_dump( self, *, @@ -178,6 +200,7 @@ def model_dump( exclude_none=exclude_none, ) + @override def model_dump_json( self, *, @@ -290,16 +313,13 @@ def construct_type(*, value: object, type_: type) -> object: return [construct_type(value=entry, type_=inner_type) for entry in value] if origin == float: - try: - return float(cast(Any, value)) - except Exception: - return value + if isinstance(value, int): + coerced = float(value) + if coerced != value: + return value + return coerced - if origin == int: - try: - return int(cast(Any, value)) - except Exception: - return value + return value if type_ == datetime: try: @@ -363,6 +383,19 @@ def _create_pydantic_model(type_: _T) -> Type[RootModel[_T]]: return RootModel[type_] # type: ignore +class FinalRequestOptionsInput(TypedDict, total=False): + method: Required[str] + url: Required[str] + params: Query + headers: Headers + max_retries: int + timeout: float | Timeout | None + files: HttpxRequestFiles | None + idempotency_key: str + json_data: Body + extra_json: AnyMapping + + @final class FinalRequestOptions(pydantic.BaseModel): method: str @@ -371,8 +404,9 @@ class FinalRequestOptions(pydantic.BaseModel): headers: Union[Headers, NotGiven] = NotGiven() max_retries: Union[int, NotGiven] = NotGiven() timeout: Union[float, Timeout, None, NotGiven] = NotGiven() - files: Union[RequestFiles, None] = None + files: Union[HttpxRequestFiles, None] = None idempotency_key: Union[str, None] = None + post_parser: Union[Callable[[Any], Any], NotGiven] = NotGiven() # It should be noted that we cannot use `json` here as that would override # a BaseModel method in an incompatible fashion. @@ -391,15 +425,25 @@ def get_max_retries(self, max_retries: int) -> int: return max_retries return self.max_retries + def _strip_raw_response_header(self) -> None: + if not is_given(self.headers): + return + + if self.headers.get(RAW_RESPONSE_HEADER): + self.headers = {**self.headers} + self.headers.pop(RAW_RESPONSE_HEADER) + # override the `construct` method so that we can run custom transformations. 
# this is necessary as we don't want to do any actual runtime type checking # (which means we can't use validators) but we do want to ensure that `NotGiven` # values are not present + # + # type ignore required because we're adding explicit types to `**values` @classmethod - def construct( + def construct( # type: ignore cls, _fields_set: set[str] | None = None, - **values: Any, + **values: Unpack[FinalRequestOptionsInput], ) -> FinalRequestOptions: kwargs: dict[str, Any] = { # we unconditionally call `strip_not_given` on any value diff --git a/src/anthropic/_response.py b/src/anthropic/_response.py new file mode 100644 index 00000000..e5000ab6 --- /dev/null +++ b/src/anthropic/_response.py @@ -0,0 +1,252 @@ +from __future__ import annotations + +import inspect +import datetime +import functools +from typing import TYPE_CHECKING, Any, Union, Generic, TypeVar, Callable, cast +from typing_extensions import Awaitable, ParamSpec, get_args, override, get_origin + +import httpx +import pydantic + +from ._types import NoneType, UnknownResponse, BinaryResponseContent +from ._utils import is_given +from ._models import BaseModel +from ._constants import RAW_RESPONSE_HEADER +from ._exceptions import APIResponseValidationError + +if TYPE_CHECKING: + from ._models import FinalRequestOptions + from ._base_client import Stream, BaseClient, AsyncStream + + +P = ParamSpec("P") +R = TypeVar("R") + + +class APIResponse(Generic[R]): + _cast_to: type[R] + _client: BaseClient[Any, Any] + _parsed: R | None + _stream: bool + _stream_cls: type[Stream[Any]] | type[AsyncStream[Any]] | None + _options: FinalRequestOptions + + http_response: httpx.Response + + def __init__( + self, + *, + raw: httpx.Response, + cast_to: type[R], + client: BaseClient[Any, Any], + stream: bool, + stream_cls: type[Stream[Any]] | type[AsyncStream[Any]] | None, + options: FinalRequestOptions, + ) -> None: + self._cast_to = cast_to + self._client = client + self._parsed = None + self._stream = stream + self._stream_cls = stream_cls + self._options = options + self.http_response = raw + + def parse(self) -> R: + if self._parsed is not None: + return self._parsed + + parsed = self._parse() + if is_given(self._options.post_parser): + parsed = self._options.post_parser(parsed) + + self._parsed = parsed + return parsed + + @property + def headers(self) -> httpx.Headers: + return self.http_response.headers + + @property + def http_request(self) -> httpx.Request: + return self.http_response.request + + @property + def status_code(self) -> int: + return self.http_response.status_code + + @property + def url(self) -> httpx.URL: + return self.http_response.url + + @property + def method(self) -> str: + return self.http_request.method + + @property + def content(self) -> bytes: + return self.http_response.content + + @property + def text(self) -> str: + return self.http_response.text + + @property + def http_version(self) -> str: + return self.http_response.http_version + + @property + def elapsed(self) -> datetime.timedelta: + """The time taken for the complete request/response cycle to complete.""" + return self.http_response.elapsed + + def _parse(self) -> R: + if self._stream: + if self._stream_cls: + return cast( + R, + self._stream_cls( + cast_to=_extract_stream_chunk_type(self._stream_cls), + response=self.http_response, + client=cast(Any, self._client), + ), + ) + + stream_cls = cast("type[Stream[Any]] | type[AsyncStream[Any]] | None", self._client._default_stream_cls) + if stream_cls is None: + raise MissingStreamClassError() + + return 
cast( + R, + stream_cls( + cast_to=self._cast_to, + response=self.http_response, + client=cast(Any, self._client), + ), + ) + + cast_to = self._cast_to + if cast_to is NoneType: + return cast(R, None) + + response = self.http_response + if cast_to == str: + return cast(R, response.text) + + origin = get_origin(cast_to) or cast_to + + if inspect.isclass(origin) and issubclass(origin, BinaryResponseContent): + return cast(R, cast_to(response)) # type: ignore + + if origin == APIResponse: + raise RuntimeError("Unexpected state - cast_to is `APIResponse`") + + if inspect.isclass(origin) and issubclass(origin, httpx.Response): + # Because of the invariance of our ResponseT TypeVar, users can subclass httpx.Response + # and pass that class to our request functions. We cannot change the variance to be either + # covariant or contravariant as that makes our usage of ResponseT illegal. We could construct + # the response class ourselves but that is something that should be supported directly in httpx + # as it would be easy to incorrectly construct the Response object due to the multitude of arguments. + if cast_to != httpx.Response: + raise ValueError(f"Subclasses of httpx.Response cannot be passed to `cast_to`") + return cast(R, response) + + # The check here is necessary as we are subverting the the type system + # with casts as the relationship between TypeVars and Types are very strict + # which means we must return *exactly* what was input or transform it in a + # way that retains the TypeVar state. As we cannot do that in this function + # then we have to resort to using `cast`. At the time of writing, we know this + # to be safe as we have handled all the types that could be bound to the + # `ResponseT` TypeVar, however if that TypeVar is ever updated in the future, then + # this function would become unsafe but a type checker would not report an error. + if ( + cast_to is not UnknownResponse + and not origin is list + and not origin is dict + and not origin is Union + and not issubclass(origin, BaseModel) + ): + raise RuntimeError( + f"Invalid state, expected {cast_to} to be a subclass type of {BaseModel}, {dict}, {list} or {Union}." + ) + + # split is required to handle cases where additional information is included + # in the response, e.g. application/json; charset=utf-8 + content_type, *_ = response.headers.get("content-type").split(";") + if content_type != "application/json": + if self._client._strict_response_validation: + raise APIResponseValidationError( + response=response, + message=f"Expected Content-Type response header to be `application/json` but received `{content_type}` instead.", + body=response.text, + ) + + # If the API responds with content that isn't JSON then we just return + # the (decoded) text without performing any parsing so that you can still + # handle the response however you need to. + return response.text # type: ignore + + data = response.json() + + try: + return self._client._process_response_data( + data=data, + cast_to=cast_to, # type: ignore + response=response, + ) + except pydantic.ValidationError as err: + raise APIResponseValidationError(response=response, body=data) from err + + @override + def __repr__(self) -> str: + return f"" + + +class MissingStreamClassError(TypeError): + def __init__(self) -> None: + super().__init__( + "The `stream` argument was set to `True` but the `stream_cls` argument was not given. 
See `anthropic._streaming` for reference", + ) + + +def _extract_stream_chunk_type(stream_cls: type) -> type: + args = get_args(stream_cls) + if not args: + raise TypeError( + f"Expected stream_cls to have been given a generic type argument, e.g. Stream[Foo] but received {stream_cls}", + ) + return cast(type, args[0]) + + +def to_raw_response_wrapper(func: Callable[P, R]) -> Callable[P, APIResponse[R]]: + """Higher order function that takes one of our bound API methods and wraps it + to support returning the raw `APIResponse` object directly. + """ + + @functools.wraps(func) + def wrapped(*args: P.args, **kwargs: P.kwargs) -> APIResponse[R]: + extra_headers = {**(cast(Any, kwargs.get("extra_headers")) or {})} + extra_headers[RAW_RESPONSE_HEADER] = "true" + + kwargs["extra_headers"] = extra_headers + + return cast(APIResponse[R], func(*args, **kwargs)) + + return wrapped + + +def async_to_raw_response_wrapper(func: Callable[P, Awaitable[R]]) -> Callable[P, Awaitable[APIResponse[R]]]: + """Higher order function that takes one of our bound API methods and wraps it + to support returning the raw `APIResponse` object directly. + """ + + @functools.wraps(func) + async def wrapped(*args: P.args, **kwargs: P.kwargs) -> APIResponse[R]: + extra_headers = {**(cast(Any, kwargs.get("extra_headers")) or {})} + extra_headers[RAW_RESPONSE_HEADER] = "true" + + kwargs["extra_headers"] = extra_headers + + return cast(APIResponse[R], await func(*args, **kwargs)) + + return wrapped diff --git a/src/anthropic/_streaming.py b/src/anthropic/_streaming.py index 510e290b..e399257e 100644 --- a/src/anthropic/_streaming.py +++ b/src/anthropic/_streaming.py @@ -3,6 +3,7 @@ import json from typing import TYPE_CHECKING, Any, Generic, Iterator, AsyncIterator +from typing_extensions import override import httpx @@ -161,6 +162,7 @@ def data(self) -> str: def json(self) -> Any: return json.loads(self.data) + @override def __repr__(self) -> str: return f"ServerSentEvent(event={self.event}, data={self.data}, id={self.id}, retry={self.retry})" diff --git a/src/anthropic/_types.py b/src/anthropic/_types.py index b84b8c22..fbd6e3af 100644 --- a/src/anthropic/_types.py +++ b/src/anthropic/_types.py @@ -1,5 +1,7 @@ from __future__ import annotations +from os import PathLike +from abc import ABC, abstractmethod from typing import ( IO, TYPE_CHECKING, @@ -11,12 +13,21 @@ Union, Mapping, TypeVar, + Callable, + Iterator, Optional, Sequence, + AsyncIterator, +) +from typing_extensions import ( + Literal, + Protocol, + TypeAlias, + TypedDict, + override, + runtime_checkable, ) -from typing_extensions import Literal, Protocol, TypeAlias, TypedDict, runtime_checkable -import httpx import pydantic from httpx import URL, Proxy, Timeout, Response, BaseTransport, AsyncBaseTransport @@ -31,10 +42,170 @@ ModelT = TypeVar("ModelT", bound=pydantic.BaseModel) _T = TypeVar("_T") + +class BinaryResponseContent(ABC): + def __init__( + self, + response: Any, + ) -> None: + ... + + @property + @abstractmethod + def content(self) -> bytes: + pass + + @property + @abstractmethod + def text(self) -> str: + pass + + @property + @abstractmethod + def encoding(self) -> Optional[str]: + """ + Return an encoding to use for decoding the byte content into text. + The priority for determining this is given by... + + * `.encoding = <>` has been set explicitly. + * The encoding as specified by the charset parameter in the Content-Type header. 
+ * The encoding as determined by `default_encoding`, which may either be + a string like "utf-8" indicating the encoding to use, or may be a callable + which enables charset autodetection. + """ + pass + + @property + @abstractmethod + def charset_encoding(self) -> Optional[str]: + """ + Return the encoding, as specified by the Content-Type header. + """ + pass + + @abstractmethod + def json(self, **kwargs: Any) -> Any: + pass + + @abstractmethod + def read(self) -> bytes: + """ + Read and return the response content. + """ + pass + + @abstractmethod + def iter_bytes(self, chunk_size: Optional[int] = None) -> Iterator[bytes]: + """ + A byte-iterator over the decoded response content. + This allows us to handle gzip, deflate, and brotli encoded responses. + """ + pass + + @abstractmethod + def iter_text(self, chunk_size: Optional[int] = None) -> Iterator[str]: + """ + A str-iterator over the decoded response content + that handles both gzip, deflate, etc but also detects the content's + string encoding. + """ + pass + + @abstractmethod + def iter_lines(self) -> Iterator[str]: + pass + + @abstractmethod + def iter_raw(self, chunk_size: Optional[int] = None) -> Iterator[bytes]: + """ + A byte-iterator over the raw response content. + """ + pass + + @abstractmethod + def stream_to_file( + self, + file: str | PathLike[str], + *, + chunk_size: int | None = None, + ) -> None: + """ + Stream the output to the given file. + """ + pass + + @abstractmethod + def close(self) -> None: + """ + Close the response and release the connection. + Automatically called if the response body is read to completion. + """ + pass + + @abstractmethod + async def aread(self) -> bytes: + """ + Read and return the response content. + """ + pass + + @abstractmethod + async def aiter_bytes(self, chunk_size: Optional[int] = None) -> AsyncIterator[bytes]: + """ + A byte-iterator over the decoded response content. + This allows us to handle gzip, deflate, and brotli encoded responses. + """ + pass + + @abstractmethod + async def aiter_text(self, chunk_size: Optional[int] = None) -> AsyncIterator[str]: + """ + A str-iterator over the decoded response content + that handles both gzip, deflate, etc but also detects the content's + string encoding. + """ + pass + + @abstractmethod + async def aiter_lines(self) -> AsyncIterator[str]: + pass + + @abstractmethod + async def aiter_raw(self, chunk_size: Optional[int] = None) -> AsyncIterator[bytes]: + """ + A byte-iterator over the raw response content. + """ + pass + + @abstractmethod + async def astream_to_file( + self, + file: str | PathLike[str], + *, + chunk_size: int | None = None, + ) -> None: + """ + Stream the output to the given file. + """ + pass + + @abstractmethod + async def aclose(self) -> None: + """ + Close the response and release the connection. + Automatically called if the response body is read to completion. + """ + pass + + # Approximates httpx internal ProxiesTypes and RequestFiles types +# while adding support for `PathLike` instances ProxiesDict = Dict["str | URL", Union[None, str, URL, Proxy]] ProxiesTypes = Union[str, Proxy, ProxiesDict] -FileContent = Union[IO[bytes], bytes] +if TYPE_CHECKING: + FileContent = Union[IO[bytes], bytes, PathLike[str]] +else: + FileContent = Union[IO[bytes], bytes, PathLike] # PathLike is not subscriptable in Python 3.8. 
FileTypes = Union[ # file (or bytes) FileContent, @@ -47,6 +218,19 @@ ] RequestFiles = Union[Mapping[str, FileTypes], Sequence[Tuple[str, FileTypes]]] +# duplicate of the above but without our custom file support +HttpxFileContent = Union[IO[bytes], bytes] +HttpxFileTypes = Union[ + # file (or bytes) + HttpxFileContent, + # (filename, file (or bytes)) + Tuple[Optional[str], HttpxFileContent], + # (filename, file (or bytes), content_type) + Tuple[Optional[str], HttpxFileContent, Optional[str]], + # (filename, file (or bytes), content_type, headers) + Tuple[Optional[str], HttpxFileContent, Optional[str], Mapping[str, str]], +] +HttpxRequestFiles = Union[Mapping[str, HttpxFileTypes], Sequence[Tuple[str, HttpxFileTypes]]] # Workaround to support (cast_to: Type[ResponseT]) -> ResponseT # where ResponseT includes `None`. In order to support directly @@ -104,6 +288,7 @@ def get(timeout: Union[int, NotGiven, None] = NotGiven()) -> Response: ... def __bool__(self) -> Literal[False]: return False + @override def __repr__(self) -> str: return "NOT_GIVEN" @@ -157,7 +342,7 @@ def get(self, __key: str) -> str | None: ResponseT = TypeVar( "ResponseT", - bound="Union[str, None, BaseModel, List[Any], Dict[str, Any], httpx.Response, UnknownResponse, ModelBuilderProtocol]", + bound="Union[str, None, BaseModel, List[Any], Dict[str, Any], Response, UnknownResponse, ModelBuilderProtocol, BinaryResponseContent]", ) StrBytesIntFloat = Union[str, bytes, int, float] @@ -165,3 +350,5 @@ def get(self, __key: str) -> str | None: # Note: copied from Pydantic # https://github.com/pydantic/pydantic/blob/32ea570bf96e84234d2992e1ddf40ab8a565925a/pydantic/main.py#L49 IncEx: TypeAlias = "set[int] | set[str] | dict[int, Any] | dict[str, Any] | None" + +PostParser = Callable[[Any], Any] diff --git a/src/anthropic/_utils/__init__.py b/src/anthropic/_utils/__init__.py index 26dc560b..d3397212 100644 --- a/src/anthropic/_utils/__init__.py +++ b/src/anthropic/_utils/__init__.py @@ -3,13 +3,18 @@ from ._utils import is_dict as is_dict from ._utils import is_list as is_list from ._utils import is_given as is_given +from ._utils import is_tuple as is_tuple from ._utils import is_mapping as is_mapping +from ._utils import is_tuple_t as is_tuple_t from ._utils import parse_date as parse_date +from ._utils import is_sequence as is_sequence from ._utils import coerce_float as coerce_float from ._utils import is_list_type as is_list_type +from ._utils import is_mapping_t as is_mapping_t from ._utils import removeprefix as removeprefix from ._utils import removesuffix as removesuffix from ._utils import extract_files as extract_files +from ._utils import is_sequence_t as is_sequence_t from ._utils import is_union_type as is_union_type from ._utils import required_args as required_args from ._utils import coerce_boolean as coerce_boolean diff --git a/src/anthropic/_utils/_proxy.py b/src/anthropic/_utils/_proxy.py index fd85ebd5..aa934a3f 100644 --- a/src/anthropic/_utils/_proxy.py +++ b/src/anthropic/_utils/_proxy.py @@ -2,7 +2,7 @@ from abc import ABC, abstractmethod from typing import Generic, TypeVar, Iterable, cast -from typing_extensions import ClassVar +from typing_extensions import ClassVar, override T = TypeVar("T") @@ -21,16 +21,20 @@ def __init__(self) -> None: def __getattr__(self, attr: str) -> object: return getattr(self.__get_proxied__(), attr) + @override def __repr__(self) -> str: return repr(self.__get_proxied__()) + @override def __str__(self) -> str: return str(self.__get_proxied__()) + @override def __dir__(self) -> 
Iterable[str]: return self.__get_proxied__().__dir__() @property # type: ignore + @override def __class__(self) -> type: return self.__get_proxied__().__class__ diff --git a/src/anthropic/_utils/_transform.py b/src/anthropic/_utils/_transform.py index c007d8b0..dc497ea3 100644 --- a/src/anthropic/_utils/_transform.py +++ b/src/anthropic/_utils/_transform.py @@ -2,7 +2,9 @@ from typing import Any, List, Mapping, TypeVar, cast from datetime import date, datetime -from typing_extensions import Literal, get_args, get_type_hints +from typing_extensions import Literal, get_args, override, get_type_hints + +import pydantic from ._utils import ( is_list, @@ -14,7 +16,7 @@ is_annotated_type, strip_annotated_type, ) -from .._compat import is_typeddict +from .._compat import model_dump, is_typeddict _T = TypeVar("_T") @@ -52,6 +54,7 @@ def __init__( self.format = format self.format_template = format_template + @override def __repr__(self) -> str: return f"{self.__class__.__name__}(alias='{self.alias}', format={self.format}, format_template='{self.format_template}')" @@ -92,7 +95,7 @@ class Params(TypedDict, total=False): return cast(_T, transformed) -def _get_annoted_type(type_: type) -> type | None: +def _get_annotated_type(type_: type) -> type | None: """If the given type is an `Annotated` type then it is returned, if not `None` is returned. This also unwraps the type when applicable, e.g. `Required[Annotated[T, ...]]` @@ -112,7 +115,7 @@ def _maybe_transform_key(key: str, type_: type) -> str: Note: this function only looks at `Annotated` types that contain `PropertInfo` metadata. """ - annotated_type = _get_annoted_type(type_) + annotated_type = _get_annotated_type(type_) if annotated_type is None: # no `Annotated` definition for this type, no transformation needed return key @@ -164,11 +167,14 @@ def _transform_recursive( data = _transform_recursive(data, annotation=annotation, inner_type=subtype) return data + if isinstance(data, pydantic.BaseModel): + return model_dump(data, exclude_unset=True, exclude_defaults=True) + return _transform_value(data, annotation) def _transform_value(data: object, type_: type) -> object: - annotated_type = _get_annoted_type(type_) + annotated_type = _get_annotated_type(type_) if annotated_type is None: return data diff --git a/src/anthropic/_utils/_utils.py b/src/anthropic/_utils/_utils.py index cb660d16..4b51dcb2 100644 --- a/src/anthropic/_utils/_utils.py +++ b/src/anthropic/_utils/_utils.py @@ -1,11 +1,20 @@ from __future__ import annotations -import io import os import re import inspect import functools -from typing import Any, Mapping, TypeVar, Callable, Iterable, Sequence, cast, overload +from typing import ( + Any, + Tuple, + Mapping, + TypeVar, + Callable, + Iterable, + Sequence, + cast, + overload, +) from pathlib import Path from typing_extensions import Required, Annotated, TypeGuard, get_args, get_origin @@ -15,6 +24,9 @@ from .._compat import parse_datetime as parse_datetime _T = TypeVar("_T") +_TupleT = TypeVar("_TupleT", bound=Tuple[object, ...]) +_MappingT = TypeVar("_MappingT", bound=Mapping[str, object]) +_SequenceT = TypeVar("_SequenceT", bound=Sequence[object]) CallableT = TypeVar("CallableT", bound=Callable[..., Any]) @@ -55,13 +67,11 @@ def _extract_items( # no value was provided - we can safely ignore return [] - # We have exhausted the path, return the entry we found. 
- if not isinstance(obj, bytes) and not isinstance(obj, tuple) and not isinstance(obj, io.IOBase): - raise RuntimeError( - f"Expected entry at {flattened_key} to be bytes, an io.IOBase instance or a tuple but received {type(obj)} instead." - ) from None + # cyclical import + from .._files import assert_is_file_content - # TODO: validate obj more? + # We have exhausted the path, return the entry we found. + assert_is_file_content(obj, key=flattened_key) assert flattened_key is not None return [(flattened_key, cast(FileTypes, obj))] @@ -116,12 +126,36 @@ def is_given(obj: NotGivenOr[_T]) -> TypeGuard[_T]: # The default narrowing for isinstance(obj, dict) is dict[unknown, unknown], # however this cause Pyright to rightfully report errors. As we know we don't # care about the contained types we can safely use `object` in it's place. +# +# There are two separate functions defined, `is_*` and `is_*_t` for different use cases. +# `is_*` is for when you're dealing with an unknown input +# `is_*_t` is for when you're narrowing a known union type to a specific subset + + +def is_tuple(obj: object) -> TypeGuard[tuple[object, ...]]: + return isinstance(obj, tuple) + + +def is_tuple_t(obj: _TupleT | object) -> TypeGuard[_TupleT]: + return isinstance(obj, tuple) + + +def is_sequence(obj: object) -> TypeGuard[Sequence[object]]: + return isinstance(obj, Sequence) + + +def is_sequence_t(obj: _SequenceT | object) -> TypeGuard[_SequenceT]: + return isinstance(obj, Sequence) def is_mapping(obj: object) -> TypeGuard[Mapping[str, object]]: return isinstance(obj, Mapping) +def is_mapping_t(obj: _MappingT | object) -> TypeGuard[_MappingT]: + return isinstance(obj, Mapping) + + def is_dict(obj: object) -> TypeGuard[dict[object, object]]: return isinstance(obj, dict) diff --git a/src/anthropic/_version.py b/src/anthropic/_version.py index f97a8267..8987a497 100644 --- a/src/anthropic/_version.py +++ b/src/anthropic/_version.py @@ -1,4 +1,4 @@ # File generated from our OpenAPI spec by Stainless. __title__ = "anthropic" -__version__ = "0.5.1" # x-release-please-version +__version__ = "0.6.0" # x-release-please-version diff --git a/src/anthropic/resources/__init__.py b/src/anthropic/resources/__init__.py index 206df985..bbb8787d 100644 --- a/src/anthropic/resources/__init__.py +++ b/src/anthropic/resources/__init__.py @@ -1,5 +1,10 @@ # File generated from our OpenAPI spec by Stainless. 
-from .completions import Completions, AsyncCompletions +from .completions import ( + Completions, + AsyncCompletions, + CompletionsWithRawResponse, + AsyncCompletionsWithRawResponse, +) -__all__ = ["Completions", "AsyncCompletions"] +__all__ = ["Completions", "AsyncCompletions", "CompletionsWithRawResponse", "AsyncCompletionsWithRawResponse"] diff --git a/src/anthropic/resources/completions.py b/src/anthropic/resources/completions.py index 1a88e4b2..a603634d 100644 --- a/src/anthropic/resources/completions.py +++ b/src/anthropic/resources/completions.py @@ -2,20 +2,32 @@ from __future__ import annotations -from typing import List, Union, overload +from typing import TYPE_CHECKING, List, Union, overload from typing_extensions import Literal +import httpx + from ..types import Completion, completion_create_params from .._types import NOT_GIVEN, Body, Query, Headers, NotGiven from .._utils import required_args, maybe_transform from .._resource import SyncAPIResource, AsyncAPIResource +from .._response import to_raw_response_wrapper, async_to_raw_response_wrapper from .._streaming import Stream, AsyncStream from .._base_client import make_request_options +if TYPE_CHECKING: + from .._client import Anthropic, AsyncAnthropic + __all__ = ["Completions", "AsyncCompletions"] class Completions(SyncAPIResource): + with_raw_response: CompletionsWithRawResponse + + def __init__(self, client: Anthropic) -> None: + super().__init__(client) + self.with_raw_response = CompletionsWithRawResponse(self) + @overload def create( self, @@ -34,7 +46,7 @@ def create( extra_headers: Headers | None = None, extra_query: Query | None = None, extra_body: Body | None = None, - timeout: float | None | NotGiven = 600, + timeout: float | httpx.Timeout | None | NotGiven = 600, ) -> Completion: """ Create a completion @@ -126,7 +138,7 @@ def create( extra_headers: Headers | None = None, extra_query: Query | None = None, extra_body: Body | None = None, - timeout: float | None | NotGiven = 600, + timeout: float | httpx.Timeout | None | NotGiven = 600, ) -> Stream[Completion]: """ Create a completion @@ -218,7 +230,7 @@ def create( extra_headers: Headers | None = None, extra_query: Query | None = None, extra_body: Body | None = None, - timeout: float | None | NotGiven = 600, + timeout: float | httpx.Timeout | None | NotGiven = 600, ) -> Completion | Stream[Completion]: """ Create a completion @@ -310,7 +322,7 @@ def create( extra_headers: Headers | None = None, extra_query: Query | None = None, extra_body: Body | None = None, - timeout: float | None | NotGiven = 600, + timeout: float | httpx.Timeout | None | NotGiven = 600, ) -> Completion | Stream[Completion]: return self._post( "/v1/complete", @@ -338,6 +350,12 @@ def create( class AsyncCompletions(AsyncAPIResource): + with_raw_response: AsyncCompletionsWithRawResponse + + def __init__(self, client: AsyncAnthropic) -> None: + super().__init__(client) + self.with_raw_response = AsyncCompletionsWithRawResponse(self) + @overload async def create( self, @@ -356,7 +374,7 @@ async def create( extra_headers: Headers | None = None, extra_query: Query | None = None, extra_body: Body | None = None, - timeout: float | None | NotGiven = 600, + timeout: float | httpx.Timeout | None | NotGiven = 600, ) -> Completion: """ Create a completion @@ -448,7 +466,7 @@ async def create( extra_headers: Headers | None = None, extra_query: Query | None = None, extra_body: Body | None = None, - timeout: float | None | NotGiven = 600, + timeout: float | httpx.Timeout | None | NotGiven = 600, ) -> 
AsyncStream[Completion]: """ Create a completion @@ -540,7 +558,7 @@ async def create( extra_headers: Headers | None = None, extra_query: Query | None = None, extra_body: Body | None = None, - timeout: float | None | NotGiven = 600, + timeout: float | httpx.Timeout | None | NotGiven = 600, ) -> Completion | AsyncStream[Completion]: """ Create a completion @@ -632,7 +650,7 @@ async def create( extra_headers: Headers | None = None, extra_query: Query | None = None, extra_body: Body | None = None, - timeout: float | None | NotGiven = 600, + timeout: float | httpx.Timeout | None | NotGiven = 600, ) -> Completion | AsyncStream[Completion]: return await self._post( "/v1/complete", @@ -657,3 +675,17 @@ async def create( stream=stream or False, stream_cls=AsyncStream[Completion], ) + + +class CompletionsWithRawResponse: + def __init__(self, completions: Completions) -> None: + self.create = to_raw_response_wrapper( + completions.create, + ) + + +class AsyncCompletionsWithRawResponse: + def __init__(self, completions: AsyncCompletions) -> None: + self.create = async_to_raw_response_wrapper( + completions.create, + ) diff --git a/tests/api_resources/test_completions.py b/tests/api_resources/test_completions.py index a6e1334f..bc152842 100644 --- a/tests/api_resources/test_completions.py +++ b/tests/api_resources/test_completions.py @@ -9,6 +9,7 @@ from anthropic import Anthropic, AsyncAnthropic from tests.utils import assert_matches_type from anthropic.types import Completion +from anthropic._client import Anthropic, AsyncAnthropic base_url = os.environ.get("TEST_API_BASE_URL", "http://127.0.0.1:4010") api_key = "my-anthropic-api-key" @@ -43,6 +44,17 @@ def test_method_create_with_all_params_overload_1(self, client: Anthropic) -> No ) assert_matches_type(Completion, completion, path=["response"]) + @parametrize + def test_raw_response_create_overload_1(self, client: Anthropic) -> None: + response = client.completions.with_raw_response.create( + max_tokens_to_sample=256, + model="claude-2", + prompt="\n\nHuman: Hello, world!\n\nAssistant:", + ) + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + completion = response.parse() + assert_matches_type(Completion, completion, path=["response"]) + @parametrize def test_method_create_overload_2(self, client: Anthropic) -> None: client.completions.create( @@ -66,6 +78,17 @@ def test_method_create_with_all_params_overload_2(self, client: Anthropic) -> No top_p=0.7, ) + @parametrize + def test_raw_response_create_overload_2(self, client: Anthropic) -> None: + response = client.completions.with_raw_response.create( + max_tokens_to_sample=256, + model="claude-2", + prompt="\n\nHuman: Hello, world!\n\nAssistant:", + stream=True, + ) + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + response.parse() + class TestAsyncCompletions: strict_client = AsyncAnthropic(base_url=base_url, api_key=api_key, _strict_response_validation=True) @@ -96,6 +119,17 @@ async def test_method_create_with_all_params_overload_1(self, client: AsyncAnthr ) assert_matches_type(Completion, completion, path=["response"]) + @parametrize + async def test_raw_response_create_overload_1(self, client: AsyncAnthropic) -> None: + response = await client.completions.with_raw_response.create( + max_tokens_to_sample=256, + model="claude-2", + prompt="\n\nHuman: Hello, world!\n\nAssistant:", + ) + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + completion = response.parse() + assert_matches_type(Completion, completion, 
path=["response"]) + @parametrize async def test_method_create_overload_2(self, client: AsyncAnthropic) -> None: await client.completions.create( @@ -118,3 +152,14 @@ async def test_method_create_with_all_params_overload_2(self, client: AsyncAnthr top_k=5, top_p=0.7, ) + + @parametrize + async def test_raw_response_create_overload_2(self, client: AsyncAnthropic) -> None: + response = await client.completions.with_raw_response.create( + max_tokens_to_sample=256, + model="claude-2", + prompt="\n\nHuman: Hello, world!\n\nAssistant:", + stream=True, + ) + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + response.parse() diff --git a/tests/api_resources/test_top_level.py b/tests/api_resources/test_top_level.py index 6993b03e..5ffd9186 100644 --- a/tests/api_resources/test_top_level.py +++ b/tests/api_resources/test_top_level.py @@ -7,6 +7,7 @@ import pytest from anthropic import Anthropic, AsyncAnthropic +from anthropic._client import Anthropic, AsyncAnthropic base_url = os.environ.get("TEST_API_BASE_URL", "http://127.0.0.1:4010") api_key = "my-anthropic-api-key" diff --git a/tests/test_client.py b/tests/test_client.py index b7797697..56438da5 100644 --- a/tests/test_client.py +++ b/tests/test_client.py @@ -16,6 +16,7 @@ from anthropic import Anthropic, AsyncAnthropic, APIResponseValidationError from anthropic._types import Omit +from anthropic._client import Anthropic, AsyncAnthropic from anthropic._models import BaseModel, FinalRequestOptions from anthropic._streaming import Stream, AsyncStream from anthropic._exceptions import APIResponseValidationError @@ -30,7 +31,7 @@ api_key = "my-anthropic-api-key" -def _get_params(client: BaseClient[Any]) -> dict[str, str]: +def _get_params(client: BaseClient[Any, Any]) -> dict[str, str]: request = client._build_request(FinalRequestOptions(method="get", url="/foo")) url = httpx.URL(request.url) return dict(url.params) @@ -41,12 +42,23 @@ class TestAnthropic: @pytest.mark.respx(base_url=base_url) def test_raw_response(self, respx_mock: MockRouter) -> None: - respx_mock.post("/foo").mock(return_value=httpx.Response(200, json={"foo": "bar"})) + respx_mock.post("/foo").mock(return_value=httpx.Response(200, json='{"foo": "bar"}')) response = self.client.post("/foo", cast_to=httpx.Response) assert response.status_code == 200 assert isinstance(response, httpx.Response) - assert response.json() == {"foo": "bar"} + assert response.json() == '{"foo": "bar"}' + + @pytest.mark.respx(base_url=base_url) + def test_raw_response_for_binary(self, respx_mock: MockRouter) -> None: + respx_mock.post("/foo").mock( + return_value=httpx.Response(200, headers={"Content-Type": "application/binary"}, content='{"foo": "bar"}') + ) + + response = self.client.post("/foo", cast_to=httpx.Response) + assert response.status_code == 200 + assert isinstance(response, httpx.Response) + assert response.json() == '{"foo": "bar"}' def test_copy(self) -> None: copied = self.client.copy() @@ -625,18 +637,20 @@ class Model(BaseModel): "remaining_retries,retry_after,timeout", [ [3, "20", 20], - [3, "0", 2], - [3, "-10", 2], + [3, "0", 0.5], + [3, "-10", 0.5], [3, "60", 60], - [3, "61", 2], + [3, "61", 0.5], [3, "Fri, 29 Sep 2023 16:26:57 GMT", 20], - [3, "Fri, 29 Sep 2023 16:26:37 GMT", 2], - [3, "Fri, 29 Sep 2023 16:26:27 GMT", 2], + [3, "Fri, 29 Sep 2023 16:26:37 GMT", 0.5], + [3, "Fri, 29 Sep 2023 16:26:27 GMT", 0.5], [3, "Fri, 29 Sep 2023 16:27:37 GMT", 60], - [3, "Fri, 29 Sep 2023 16:27:38 GMT", 2], - [3, "99999999999999999999999999999999999", 2], - [3, "Zun, 29 
Sep 2023 16:26:27 GMT", 2], - [3, "", 2], + [3, "Fri, 29 Sep 2023 16:27:38 GMT", 0.5], + [3, "99999999999999999999999999999999999", 0.5], + [3, "Zun, 29 Sep 2023 16:26:27 GMT", 0.5], + [3, "", 0.5], + [2, "", 0.5 * 2.0], + [1, "", 0.5 * 4.0], ], ) @mock.patch("time.time", mock.MagicMock(return_value=1696004797)) @@ -644,9 +658,9 @@ def test_parse_retry_after_header(self, remaining_retries: int, retry_after: str client = Anthropic(base_url=base_url, api_key=api_key, _strict_response_validation=True) headers = httpx.Headers({"retry-after": retry_after}) - options = FinalRequestOptions(method="get", url="/foo", max_retries=2) + options = FinalRequestOptions(method="get", url="/foo", max_retries=3) calculated = client._calculate_retry_timeout(remaining_retries, options, headers) - assert calculated == pytest.approx(timeout, 0.6) # pyright: ignore[reportUnknownMemberType] + assert calculated == pytest.approx(timeout, 0.5 * 0.875) # pyright: ignore[reportUnknownMemberType] class TestAsyncAnthropic: @@ -655,12 +669,24 @@ class TestAsyncAnthropic: @pytest.mark.respx(base_url=base_url) @pytest.mark.asyncio async def test_raw_response(self, respx_mock: MockRouter) -> None: - respx_mock.post("/foo").mock(return_value=httpx.Response(200, json={"foo": "bar"})) + respx_mock.post("/foo").mock(return_value=httpx.Response(200, json='{"foo": "bar"}')) + + response = await self.client.post("/foo", cast_to=httpx.Response) + assert response.status_code == 200 + assert isinstance(response, httpx.Response) + assert response.json() == '{"foo": "bar"}' + + @pytest.mark.respx(base_url=base_url) + @pytest.mark.asyncio + async def test_raw_response_for_binary(self, respx_mock: MockRouter) -> None: + respx_mock.post("/foo").mock( + return_value=httpx.Response(200, headers={"Content-Type": "application/binary"}, content='{"foo": "bar"}') + ) response = await self.client.post("/foo", cast_to=httpx.Response) assert response.status_code == 200 assert isinstance(response, httpx.Response) - assert response.json() == {"foo": "bar"} + assert response.json() == '{"foo": "bar"}' def test_copy(self) -> None: copied = self.client.copy() @@ -1252,18 +1278,20 @@ class Model(BaseModel): "remaining_retries,retry_after,timeout", [ [3, "20", 20], - [3, "0", 2], - [3, "-10", 2], + [3, "0", 0.5], + [3, "-10", 0.5], [3, "60", 60], - [3, "61", 2], + [3, "61", 0.5], [3, "Fri, 29 Sep 2023 16:26:57 GMT", 20], - [3, "Fri, 29 Sep 2023 16:26:37 GMT", 2], - [3, "Fri, 29 Sep 2023 16:26:27 GMT", 2], + [3, "Fri, 29 Sep 2023 16:26:37 GMT", 0.5], + [3, "Fri, 29 Sep 2023 16:26:27 GMT", 0.5], [3, "Fri, 29 Sep 2023 16:27:37 GMT", 60], - [3, "Fri, 29 Sep 2023 16:27:38 GMT", 2], - [3, "99999999999999999999999999999999999", 2], - [3, "Zun, 29 Sep 2023 16:26:27 GMT", 2], - [3, "", 2], + [3, "Fri, 29 Sep 2023 16:27:38 GMT", 0.5], + [3, "99999999999999999999999999999999999", 0.5], + [3, "Zun, 29 Sep 2023 16:26:27 GMT", 0.5], + [3, "", 0.5], + [2, "", 0.5 * 2.0], + [1, "", 0.5 * 4.0], ], ) @mock.patch("time.time", mock.MagicMock(return_value=1696004797)) @@ -1272,6 +1300,6 @@ async def test_parse_retry_after_header(self, remaining_retries: int, retry_afte client = AsyncAnthropic(base_url=base_url, api_key=api_key, _strict_response_validation=True) headers = httpx.Headers({"retry-after": retry_after}) - options = FinalRequestOptions(method="get", url="/foo", max_retries=2) + options = FinalRequestOptions(method="get", url="/foo", max_retries=3) calculated = client._calculate_retry_timeout(remaining_retries, options, headers) - assert calculated == 
pytest.approx(timeout, 0.6) # pyright: ignore[reportUnknownMemberType] + assert calculated == pytest.approx(timeout, 0.5 * 0.875) # pyright: ignore[reportUnknownMemberType] diff --git a/tests/test_extract_files.py b/tests/test_extract_files.py index 988cf179..02c0614f 100644 --- a/tests/test_extract_files.py +++ b/tests/test_extract_files.py @@ -54,7 +54,7 @@ def test_multiple_files() -> None: [], ], ], - ids=["dict expecting array", "arraye expecting dict", "unknown keys"], + ids=["dict expecting array", "array expecting dict", "unknown keys"], ) def test_ignores_incorrect_paths( query: dict[str, object], diff --git a/tests/test_files.py b/tests/test_files.py new file mode 100644 index 00000000..4b118fe0 --- /dev/null +++ b/tests/test_files.py @@ -0,0 +1,51 @@ +from pathlib import Path + +import anyio +import pytest +from dirty_equals import IsDict, IsList, IsBytes, IsTuple + +from anthropic._files import to_httpx_files, async_to_httpx_files + +readme_path = Path(__file__).parent.parent.joinpath("README.md") + + +def test_pathlib_includes_file_name() -> None: + result = to_httpx_files({"file": readme_path}) + print(result) + assert result == IsDict({"file": IsTuple("README.md", IsBytes())}) + + +def test_tuple_input() -> None: + result = to_httpx_files([("file", readme_path)]) + print(result) + assert result == IsList(IsTuple("file", IsTuple("README.md", IsBytes()))) + + +@pytest.mark.asyncio +async def test_async_pathlib_includes_file_name() -> None: + result = await async_to_httpx_files({"file": readme_path}) + print(result) + assert result == IsDict({"file": IsTuple("README.md", IsBytes())}) + + +@pytest.mark.asyncio +async def test_async_supports_anyio_path() -> None: + result = await async_to_httpx_files({"file": anyio.Path(readme_path)}) + print(result) + assert result == IsDict({"file": IsTuple("README.md", IsBytes())}) + + +@pytest.mark.asyncio +async def test_async_tuple_input() -> None: + result = await async_to_httpx_files([("file", readme_path)]) + print(result) + assert result == IsList(IsTuple("file", IsTuple("README.md", IsBytes()))) + + +def test_string_not_allowed() -> None: + with pytest.raises(TypeError, match="Expected file types input to be a FileContent type or to be a tuple"): + to_httpx_files( + { + "file": "foo", # type: ignore + } + ) diff --git a/tests/test_models.py b/tests/test_models.py index 1c18b5e7..d34e3236 100644 --- a/tests/test_models.py +++ b/tests/test_models.py @@ -439,21 +439,32 @@ class Model(BaseModel): assert model_json(model) == expected_json -def test_coerces_int() -> None: +def test_does_not_coerce_int() -> None: class Model(BaseModel): bar: int assert Model.construct(bar=1).bar == 1 - assert Model.construct(bar=10.9).bar == 10 - assert Model.construct(bar="19").bar == 19 - assert Model.construct(bar=False).bar == 0 + assert Model.construct(bar=10.9).bar == 10.9 + assert Model.construct(bar="19").bar == "19" # type: ignore[comparison-overlap] + assert Model.construct(bar=False).bar is False - # TODO: support this - # assert Model.construct(bar="True").bar == 1 - # mismatched types are left as-is - m = Model.construct(bar={"foo": "bar"}) - assert m.bar == {"foo": "bar"} # type: ignore[comparison-overlap] +def test_int_to_float_safe_conversion() -> None: + class Model(BaseModel): + float_field: float + + m = Model.construct(float_field=10) + assert m.float_field == 10.0 + assert isinstance(m.float_field, float) + + m = Model.construct(float_field=10.12) + assert m.float_field == 10.12 + assert isinstance(m.float_field, float) + + # number too 
big + m = Model.construct(float_field=2**53 + 1) + assert m.float_field == 2**53 + 1 + assert isinstance(m.float_field, int) def test_deprecated_alias() -> None: diff --git a/tests/test_qs.py b/tests/test_qs.py index b21842e0..564a41e6 100644 --- a/tests/test_qs.py +++ b/tests/test_qs.py @@ -61,6 +61,18 @@ def test_array_repeat() -> None: assert unquote(stringify({"in": ["foo", {"b": {"c": ["d", "e"]}}]})) == "in=foo&in[b][c]=d&in[b][c]=e" +@pytest.mark.parametrize("method", ["class", "function"]) +def test_array_brackets(method: str) -> None: + if method == "class": + serialise = Querystring(array_format="brackets").stringify + else: + serialise = partial(stringify, array_format="brackets") + + assert unquote(serialise({"in": ["foo", "bar"]})) == "in[]=foo&in[]=bar" + assert unquote(serialise({"a": {"b": [True, False]}})) == "a[b][]=true&a[b][]=false" + assert unquote(serialise({"a": {"b": [True, False, None, True]}})) == "a[b][]=true&a[b][]=false&a[b][]=true" + + def test_unknown_array_format() -> None: with pytest.raises(NotImplementedError, match="Unknown array_format value: foo, choose from comma, repeat"): stringify({"a": ["foo", "bar"]}, array_format=cast(Any, "foo")) diff --git a/tests/test_transform.py b/tests/test_transform.py index b7334957..8e1d4724 100644 --- a/tests/test_transform.py +++ b/tests/test_transform.py @@ -1,10 +1,13 @@ from __future__ import annotations -from typing import List, Union, Optional +from typing import Any, List, Union, Optional from datetime import date, datetime from typing_extensions import Required, Annotated, TypedDict +import pytest + from anthropic._utils import PropertyInfo, transform, parse_datetime +from anthropic._models import BaseModel class Foo1(TypedDict): @@ -186,3 +189,44 @@ class DateDictWithRequiredAlias(TypedDict, total=False): def test_datetime_with_alias() -> None: assert transform({"required_prop": None}, DateDictWithRequiredAlias) == {"prop": None} # type: ignore[comparison-overlap] assert transform({"required_prop": date.fromisoformat("2023-02-23")}, DateDictWithRequiredAlias) == {"prop": "2023-02-23"} # type: ignore[comparison-overlap] + + +class MyModel(BaseModel): + foo: str + + +def test_pydantic_model_to_dictionary() -> None: + assert transform(MyModel(foo="hi!"), Any) == {"foo": "hi!"} + assert transform(MyModel.construct(foo="hi!"), Any) == {"foo": "hi!"} + + +def test_pydantic_empty_model() -> None: + assert transform(MyModel.construct(), Any) == {} + + +def test_pydantic_unknown_field() -> None: + assert transform(MyModel.construct(my_untyped_field=True), Any) == {"my_untyped_field": True} + + +def test_pydantic_mismatched_types() -> None: + model = MyModel.construct(foo=True) + with pytest.warns(UserWarning): + params = transform(model, Any) + assert params == {"foo": True} + + +def test_pydantic_mismatched_object_type() -> None: + model = MyModel.construct(foo=MyModel.construct(hello="world")) + with pytest.warns(UserWarning): + params = transform(model, Any) + assert params == {"foo": {"hello": "world"}} + + +class ModelNestedObjects(BaseModel): + nested: MyModel + + +def test_pydantic_nested_objects() -> None: + model = ModelNestedObjects.construct(nested={"foo": "stainless"}) + assert isinstance(model.nested, MyModel) + assert transform(model, Any) == {"nested": {"foo": "stainless"}}
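
For reference, a minimal usage sketch of the raw-response surface and the httpx.Timeout support introduced in this release. This is illustrative only and not part of the diff: the model, prompt, parameter names, and header check are taken from the tests above, while the API key and the specific timeout values are placeholder assumptions.

import httpx

from anthropic import Anthropic

# Placeholder credentials; substitute a real key or rely on the ANTHROPIC_API_KEY env var.
client = Anthropic(api_key="my-anthropic-api-key")

# `.with_raw_response.create(...)` returns the wrapped APIResponse instead of a parsed
# Completion; `.parse()` then yields the same object a plain `.create(...)` call returns.
response = client.completions.with_raw_response.create(
    max_tokens_to_sample=256,
    model="claude-2",
    prompt="\n\nHuman: Hello, world!\n\nAssistant:",
    # The `timeout` argument now also accepts an httpx.Timeout instance.
    timeout=httpx.Timeout(600.0, connect=5.0),
)
print(response.http_request.headers.get("X-Stainless-Lang"))  # "python"
completion = response.parse()
print(completion.completion)

As the to_raw_response_wrapper hunk above shows, the wrapper only injects the raw-response marker header into `extra_headers` before delegating to the bound method, so request building, retries, and streaming behave the same as for the regular call.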