diff --git a/.stats.yml b/.stats.yml
index 63da3bd2..61bec13e 100644
--- a/.stats.yml
+++ b/.stats.yml
@@ -1,2 +1,2 @@
-configured_endpoints: 9
-openapi_spec_url: https://storage.googleapis.com/stainless-sdk-openapi-specs/anthropic-134649e8f68418dc1cde6589ef6c63f79f65d8cbcc8732a6652354ade6548d16.yml
+configured_endpoints: 10
+openapi_spec_url: https://storage.googleapis.com/stainless-sdk-openapi-specs/anthropic-d95f5b98650cf1d0a75bd514eaa6705bef41aa89e8fe37e849ccdde57a91aaa2.yml
diff --git a/api.md b/api.md
index b51fe19d..ef21566a 100644
--- a/api.md
+++ b/api.md
@@ -71,6 +71,8 @@ Types:
```python
from anthropic.types.beta import (
+ BetaBase64PDFBlock,
+ BetaBase64PDFSource,
BetaCacheControlEphemeral,
BetaContentBlock,
BetaContentBlockParam,
@@ -79,6 +81,7 @@ from anthropic.types.beta import (
BetaMessage,
BetaMessageDeltaUsage,
BetaMessageParam,
+ BetaMessageTokensCount,
BetaMetadata,
BetaRawContentBlockDeltaEvent,
BetaRawContentBlockStartEvent,
@@ -109,6 +112,7 @@ from anthropic.types.beta import (
Methods:
- client.beta.messages.create(\*\*params) -> BetaMessage
+- client.beta.messages.count_tokens(\*\*params) -> BetaMessageTokensCount
### Batches
diff --git a/src/anthropic/resources/beta/messages/messages.py b/src/anthropic/resources/beta/messages/messages.py
index c0a1f26f..12a3273f 100644
--- a/src/anthropic/resources/beta/messages/messages.py
+++ b/src/anthropic/resources/beta/messages/messages.py
@@ -29,7 +29,7 @@
from ...._response import to_streamed_response_wrapper, async_to_streamed_response_wrapper
from ...._constants import DEFAULT_TIMEOUT
from ...._streaming import Stream, AsyncStream
-from ....types.beta import message_create_params
+from ....types.beta import message_create_params, message_count_tokens_params
from ...._base_client import make_request_options
from ....types.model_param import ModelParam
from ....types.beta.beta_message import BetaMessage
@@ -39,6 +39,7 @@
from ....types.beta.beta_text_block_param import BetaTextBlockParam
from ....types.beta.beta_tool_union_param import BetaToolUnionParam
from ....types.beta.beta_tool_choice_param import BetaToolChoiceParam
+from ....types.beta.beta_message_tokens_count import BetaMessageTokensCount
from ....types.beta.beta_raw_message_stream_event import BetaRawMessageStreamEvent
__all__ = ["Messages", "AsyncMessages"]
@@ -920,6 +921,230 @@ def create(
stream_cls=Stream[BetaRawMessageStreamEvent],
)
+ def count_tokens(
+ self,
+ *,
+ messages: Iterable[BetaMessageParam],
+ model: ModelParam,
+ system: Union[str, Iterable[BetaTextBlockParam]] | NotGiven = NOT_GIVEN,
+ tool_choice: BetaToolChoiceParam | NotGiven = NOT_GIVEN,
+ tools: Iterable[message_count_tokens_params.Tool] | NotGiven = NOT_GIVEN,
+ betas: List[AnthropicBetaParam] | NotGiven = NOT_GIVEN,
+ # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
+ # The extra values given here take precedence over values defined on the client or passed to this method.
+ extra_headers: Headers | None = None,
+ extra_query: Query | None = None,
+ extra_body: Body | None = None,
+ timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
+ ) -> BetaMessageTokensCount:
+ """
+ Count the number of tokens in a Message.
+
+ The Token Count API can be used to count the number of tokens in a Message,
+ including tools, images, and documents, without creating it.
+
+ Args:
+ messages: Input messages.
+
+ Our models are trained to operate on alternating `user` and `assistant`
+ conversational turns. When creating a new `Message`, you specify the prior
+ conversational turns with the `messages` parameter, and the model then generates
+ the next `Message` in the conversation. Consecutive `user` or `assistant` turns
+ in your request will be combined into a single turn.
+
+ Each input message must be an object with a `role` and `content`. You can
+ specify a single `user`-role message, or you can include multiple `user` and
+ `assistant` messages.
+
+ If the final message uses the `assistant` role, the response content will
+ continue immediately from the content in that message. This can be used to
+ constrain part of the model's response.
+
+ Example with a single `user` message:
+
+ ```json
+ [{ "role": "user", "content": "Hello, Claude" }]
+ ```
+
+ Example with multiple conversational turns:
+
+ ```json
+ [
+ { "role": "user", "content": "Hello there." },
+ { "role": "assistant", "content": "Hi, I'm Claude. How can I help you?" },
+ { "role": "user", "content": "Can you explain LLMs in plain English?" }
+ ]
+ ```
+
+ Example with a partially-filled response from Claude:
+
+ ```json
+ [
+ {
+ "role": "user",
+ "content": "What's the Greek name for Sun? (A) Sol (B) Helios (C) Sun"
+ },
+ { "role": "assistant", "content": "The best answer is (" }
+ ]
+ ```
+
+ Each input message `content` may be either a single `string` or an array of
+ content blocks, where each block has a specific `type`. Using a `string` for
+ `content` is shorthand for an array of one content block of type `"text"`. The
+ following input messages are equivalent:
+
+ ```json
+ { "role": "user", "content": "Hello, Claude" }
+ ```
+
+ ```json
+ { "role": "user", "content": [{ "type": "text", "text": "Hello, Claude" }] }
+ ```
+
+ Starting with Claude 3 models, you can also send image content blocks:
+
+ ```json
+ {
+ "role": "user",
+ "content": [
+ {
+ "type": "image",
+ "source": {
+ "type": "base64",
+ "media_type": "image/jpeg",
+ "data": "/9j/4AAQSkZJRg..."
+ }
+ },
+ { "type": "text", "text": "What is in this image?" }
+ ]
+ }
+ ```
+
+ We currently support the `base64` source type for images, and the `image/jpeg`,
+ `image/png`, `image/gif`, and `image/webp` media types.
+
+ See [examples](https://docs.anthropic.com/en/api/messages-examples#vision) for
+ more input examples.
+
+ Note that if you want to include a
+ [system prompt](https://docs.anthropic.com/en/docs/system-prompts), you can use
+ the top-level `system` parameter — there is no `"system"` role for input
+ messages in the Messages API.
+
+          model: The model that will complete your prompt. See
+ [models](https://docs.anthropic.com/en/docs/models-overview) for additional
+ details and options.
+
+ system: System prompt.
+
+ A system prompt is a way of providing context and instructions to Claude, such
+ as specifying a particular goal or role. See our
+ [guide to system prompts](https://docs.anthropic.com/en/docs/system-prompts).
+
+ tool_choice: How the model should use the provided tools. The model can use a specific tool,
+ any available tool, or decide by itself.
+
+ tools: Definitions of tools that the model may use.
+
+ If you include `tools` in your API request, the model may return `tool_use`
+ content blocks that represent the model's use of those tools. You can then run
+ those tools using the tool input generated by the model and then optionally
+ return results back to the model using `tool_result` content blocks.
+
+ Each tool definition includes:
+
+ - `name`: Name of the tool.
+ - `description`: Optional, but strongly-recommended description of the tool.
+ - `input_schema`: [JSON schema](https://json-schema.org/) for the tool `input`
+ shape that the model will produce in `tool_use` output content blocks.
+
+ For example, if you defined `tools` as:
+
+ ```json
+ [
+ {
+ "name": "get_stock_price",
+ "description": "Get the current stock price for a given ticker symbol.",
+ "input_schema": {
+ "type": "object",
+ "properties": {
+ "ticker": {
+ "type": "string",
+ "description": "The stock ticker symbol, e.g. AAPL for Apple Inc."
+ }
+ },
+ "required": ["ticker"]
+ }
+ }
+ ]
+ ```
+
+ And then asked the model "What's the S&P 500 at today?", the model might produce
+ `tool_use` content blocks in the response like this:
+
+ ```json
+ [
+ {
+ "type": "tool_use",
+ "id": "toolu_01D7FLrfh4GYq7yT1ULFeyMV",
+ "name": "get_stock_price",
+ "input": { "ticker": "^GSPC" }
+ }
+ ]
+ ```
+
+ You might then run your `get_stock_price` tool with `{"ticker": "^GSPC"}` as an
+ input, and return the following back to the model in a subsequent `user`
+ message:
+
+ ```json
+ [
+ {
+ "type": "tool_result",
+ "tool_use_id": "toolu_01D7FLrfh4GYq7yT1ULFeyMV",
+ "content": "259.75 USD"
+ }
+ ]
+ ```
+
+ Tools can be used for workflows that include running client-side tools and
+ functions, or more generally whenever you want the model to produce a particular
+ JSON structure of output.
+
+ See our [guide](https://docs.anthropic.com/en/docs/tool-use) for more details.
+
+ betas: Optional header to specify the beta version(s) you want to use.
+
+ extra_headers: Send extra headers
+
+ extra_query: Add additional query parameters to the request
+
+ extra_body: Add additional JSON properties to the request
+
+ timeout: Override the client-level default timeout for this request, in seconds
+ """
+ extra_headers = {
+ **strip_not_given({"anthropic-beta": ",".join(str(e) for e in betas) if is_given(betas) else NOT_GIVEN}),
+ **(extra_headers or {}),
+ }
+ return self._post(
+ "/v1/messages/count_tokens?beta=true",
+ body=maybe_transform(
+ {
+ "messages": messages,
+ "model": model,
+ "system": system,
+ "tool_choice": tool_choice,
+ "tools": tools,
+ },
+ message_count_tokens_params.MessageCountTokensParams,
+ ),
+ options=make_request_options(
+ extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout
+ ),
+ cast_to=BetaMessageTokensCount,
+ )
+
class AsyncMessages(AsyncAPIResource):
@cached_property
@@ -1797,6 +2022,230 @@ async def create(
stream_cls=AsyncStream[BetaRawMessageStreamEvent],
)
+ async def count_tokens(
+ self,
+ *,
+ messages: Iterable[BetaMessageParam],
+ model: ModelParam,
+ system: Union[str, Iterable[BetaTextBlockParam]] | NotGiven = NOT_GIVEN,
+ tool_choice: BetaToolChoiceParam | NotGiven = NOT_GIVEN,
+ tools: Iterable[message_count_tokens_params.Tool] | NotGiven = NOT_GIVEN,
+ betas: List[AnthropicBetaParam] | NotGiven = NOT_GIVEN,
+ # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
+ # The extra values given here take precedence over values defined on the client or passed to this method.
+ extra_headers: Headers | None = None,
+ extra_query: Query | None = None,
+ extra_body: Body | None = None,
+ timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
+ ) -> BetaMessageTokensCount:
+ """
+ Count the number of tokens in a Message.
+
+ The Token Count API can be used to count the number of tokens in a Message,
+ including tools, images, and documents, without creating it.
+
+ Args:
+ messages: Input messages.
+
+ Our models are trained to operate on alternating `user` and `assistant`
+ conversational turns. When creating a new `Message`, you specify the prior
+ conversational turns with the `messages` parameter, and the model then generates
+ the next `Message` in the conversation. Consecutive `user` or `assistant` turns
+ in your request will be combined into a single turn.
+
+ Each input message must be an object with a `role` and `content`. You can
+ specify a single `user`-role message, or you can include multiple `user` and
+ `assistant` messages.
+
+ If the final message uses the `assistant` role, the response content will
+ continue immediately from the content in that message. This can be used to
+ constrain part of the model's response.
+
+ Example with a single `user` message:
+
+ ```json
+ [{ "role": "user", "content": "Hello, Claude" }]
+ ```
+
+ Example with multiple conversational turns:
+
+ ```json
+ [
+ { "role": "user", "content": "Hello there." },
+ { "role": "assistant", "content": "Hi, I'm Claude. How can I help you?" },
+ { "role": "user", "content": "Can you explain LLMs in plain English?" }
+ ]
+ ```
+
+ Example with a partially-filled response from Claude:
+
+ ```json
+ [
+ {
+ "role": "user",
+ "content": "What's the Greek name for Sun? (A) Sol (B) Helios (C) Sun"
+ },
+ { "role": "assistant", "content": "The best answer is (" }
+ ]
+ ```
+
+ Each input message `content` may be either a single `string` or an array of
+ content blocks, where each block has a specific `type`. Using a `string` for
+ `content` is shorthand for an array of one content block of type `"text"`. The
+ following input messages are equivalent:
+
+ ```json
+ { "role": "user", "content": "Hello, Claude" }
+ ```
+
+ ```json
+ { "role": "user", "content": [{ "type": "text", "text": "Hello, Claude" }] }
+ ```
+
+ Starting with Claude 3 models, you can also send image content blocks:
+
+ ```json
+ {
+ "role": "user",
+ "content": [
+ {
+ "type": "image",
+ "source": {
+ "type": "base64",
+ "media_type": "image/jpeg",
+ "data": "/9j/4AAQSkZJRg..."
+ }
+ },
+ { "type": "text", "text": "What is in this image?" }
+ ]
+ }
+ ```
+
+ We currently support the `base64` source type for images, and the `image/jpeg`,
+ `image/png`, `image/gif`, and `image/webp` media types.
+
+ See [examples](https://docs.anthropic.com/en/api/messages-examples#vision) for
+ more input examples.
+
+ Note that if you want to include a
+ [system prompt](https://docs.anthropic.com/en/docs/system-prompts), you can use
+ the top-level `system` parameter — there is no `"system"` role for input
+ messages in the Messages API.
+
+          model: The model that will complete your prompt. See
+ [models](https://docs.anthropic.com/en/docs/models-overview) for additional
+ details and options.
+
+ system: System prompt.
+
+ A system prompt is a way of providing context and instructions to Claude, such
+ as specifying a particular goal or role. See our
+ [guide to system prompts](https://docs.anthropic.com/en/docs/system-prompts).
+
+ tool_choice: How the model should use the provided tools. The model can use a specific tool,
+ any available tool, or decide by itself.
+
+ tools: Definitions of tools that the model may use.
+
+ If you include `tools` in your API request, the model may return `tool_use`
+ content blocks that represent the model's use of those tools. You can then run
+ those tools using the tool input generated by the model and then optionally
+ return results back to the model using `tool_result` content blocks.
+
+ Each tool definition includes:
+
+ - `name`: Name of the tool.
+ - `description`: Optional, but strongly-recommended description of the tool.
+ - `input_schema`: [JSON schema](https://json-schema.org/) for the tool `input`
+ shape that the model will produce in `tool_use` output content blocks.
+
+ For example, if you defined `tools` as:
+
+ ```json
+ [
+ {
+ "name": "get_stock_price",
+ "description": "Get the current stock price for a given ticker symbol.",
+ "input_schema": {
+ "type": "object",
+ "properties": {
+ "ticker": {
+ "type": "string",
+ "description": "The stock ticker symbol, e.g. AAPL for Apple Inc."
+ }
+ },
+ "required": ["ticker"]
+ }
+ }
+ ]
+ ```
+
+ And then asked the model "What's the S&P 500 at today?", the model might produce
+ `tool_use` content blocks in the response like this:
+
+ ```json
+ [
+ {
+ "type": "tool_use",
+ "id": "toolu_01D7FLrfh4GYq7yT1ULFeyMV",
+ "name": "get_stock_price",
+ "input": { "ticker": "^GSPC" }
+ }
+ ]
+ ```
+
+ You might then run your `get_stock_price` tool with `{"ticker": "^GSPC"}` as an
+ input, and return the following back to the model in a subsequent `user`
+ message:
+
+ ```json
+ [
+ {
+ "type": "tool_result",
+ "tool_use_id": "toolu_01D7FLrfh4GYq7yT1ULFeyMV",
+ "content": "259.75 USD"
+ }
+ ]
+ ```
+
+ Tools can be used for workflows that include running client-side tools and
+ functions, or more generally whenever you want the model to produce a particular
+ JSON structure of output.
+
+ See our [guide](https://docs.anthropic.com/en/docs/tool-use) for more details.
+
+ betas: Optional header to specify the beta version(s) you want to use.
+
+ extra_headers: Send extra headers
+
+ extra_query: Add additional query parameters to the request
+
+ extra_body: Add additional JSON properties to the request
+
+ timeout: Override the client-level default timeout for this request, in seconds
+ """
+ extra_headers = {
+ **strip_not_given({"anthropic-beta": ",".join(str(e) for e in betas) if is_given(betas) else NOT_GIVEN}),
+ **(extra_headers or {}),
+ }
+ return await self._post(
+ "/v1/messages/count_tokens?beta=true",
+ body=await async_maybe_transform(
+ {
+ "messages": messages,
+ "model": model,
+ "system": system,
+ "tool_choice": tool_choice,
+ "tools": tools,
+ },
+ message_count_tokens_params.MessageCountTokensParams,
+ ),
+ options=make_request_options(
+ extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout
+ ),
+ cast_to=BetaMessageTokensCount,
+ )
+
class MessagesWithRawResponse:
def __init__(self, messages: Messages) -> None:
@@ -1805,6 +2254,9 @@ def __init__(self, messages: Messages) -> None:
self.create = _legacy_response.to_raw_response_wrapper(
messages.create,
)
+ self.count_tokens = _legacy_response.to_raw_response_wrapper(
+ messages.count_tokens,
+ )
@cached_property
def batches(self) -> BatchesWithRawResponse:
@@ -1818,6 +2270,9 @@ def __init__(self, messages: AsyncMessages) -> None:
self.create = _legacy_response.async_to_raw_response_wrapper(
messages.create,
)
+ self.count_tokens = _legacy_response.async_to_raw_response_wrapper(
+ messages.count_tokens,
+ )
@cached_property
def batches(self) -> AsyncBatchesWithRawResponse:
@@ -1831,6 +2286,9 @@ def __init__(self, messages: Messages) -> None:
self.create = to_streamed_response_wrapper(
messages.create,
)
+ self.count_tokens = to_streamed_response_wrapper(
+ messages.count_tokens,
+ )
@cached_property
def batches(self) -> BatchesWithStreamingResponse:
@@ -1844,6 +2302,9 @@ def __init__(self, messages: AsyncMessages) -> None:
self.create = async_to_streamed_response_wrapper(
messages.create,
)
+ self.count_tokens = async_to_streamed_response_wrapper(
+ messages.count_tokens,
+ )
@cached_property
def batches(self) -> AsyncBatchesWithStreamingResponse:
diff --git a/src/anthropic/types/anthropic_beta_param.py b/src/anthropic/types/anthropic_beta_param.py
index 1a3350e3..b1d7e296 100644
--- a/src/anthropic/types/anthropic_beta_param.py
+++ b/src/anthropic/types/anthropic_beta_param.py
@@ -8,5 +8,6 @@
__all__ = ["AnthropicBetaParam"]
AnthropicBetaParam: TypeAlias = Union[
- str, Literal["message-batches-2024-09-24", "prompt-caching-2024-07-31", "computer-use-2024-10-22"]
+ str,
+ Literal["message-batches-2024-09-24", "prompt-caching-2024-07-31", "computer-use-2024-10-22", "pdfs-2024-09-25"],
]
diff --git a/src/anthropic/types/beta/__init__.py b/src/anthropic/types/beta/__init__.py
index 9bd04054..cf5fd496 100644
--- a/src/anthropic/types/beta/__init__.py
+++ b/src/anthropic/types/beta/__init__.py
@@ -19,11 +19,15 @@
from .beta_tool_choice_param import BetaToolChoiceParam as BetaToolChoiceParam
from .beta_content_block_param import BetaContentBlockParam as BetaContentBlockParam
from .beta_message_delta_usage import BetaMessageDeltaUsage as BetaMessageDeltaUsage
+from .beta_message_tokens_count import BetaMessageTokensCount as BetaMessageTokensCount
from .beta_tool_use_block_param import BetaToolUseBlockParam as BetaToolUseBlockParam
from .beta_tool_choice_any_param import BetaToolChoiceAnyParam as BetaToolChoiceAnyParam
+from .beta_base64_pdf_block_param import BetaBase64PDFBlockParam as BetaBase64PDFBlockParam
from .beta_raw_message_stop_event import BetaRawMessageStopEvent as BetaRawMessageStopEvent
from .beta_tool_choice_auto_param import BetaToolChoiceAutoParam as BetaToolChoiceAutoParam
from .beta_tool_choice_tool_param import BetaToolChoiceToolParam as BetaToolChoiceToolParam
+from .message_count_tokens_params import MessageCountTokensParams as MessageCountTokensParams
+from .beta_base64_pdf_source_param import BetaBase64PDFSourceParam as BetaBase64PDFSourceParam
from .beta_raw_message_delta_event import BetaRawMessageDeltaEvent as BetaRawMessageDeltaEvent
from .beta_raw_message_start_event import BetaRawMessageStartEvent as BetaRawMessageStartEvent
from .beta_tool_result_block_param import BetaToolResultBlockParam as BetaToolResultBlockParam
diff --git a/src/anthropic/types/beta/beta_base64_pdf_block_param.py b/src/anthropic/types/beta/beta_base64_pdf_block_param.py
new file mode 100644
index 00000000..43a21329
--- /dev/null
+++ b/src/anthropic/types/beta/beta_base64_pdf_block_param.py
@@ -0,0 +1,19 @@
+# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
+
+from __future__ import annotations
+
+from typing import Optional
+from typing_extensions import Literal, Required, TypedDict
+
+from .beta_base64_pdf_source_param import BetaBase64PDFSourceParam
+from .beta_cache_control_ephemeral_param import BetaCacheControlEphemeralParam
+
+__all__ = ["BetaBase64PDFBlockParam"]
+
+
+class BetaBase64PDFBlockParam(TypedDict, total=False):
+ source: Required[BetaBase64PDFSourceParam]
+
+ type: Required[Literal["document"]]
+
+ cache_control: Optional[BetaCacheControlEphemeralParam]
diff --git a/src/anthropic/types/beta/beta_base64_pdf_source_param.py b/src/anthropic/types/beta/beta_base64_pdf_source_param.py
new file mode 100644
index 00000000..1137c957
--- /dev/null
+++ b/src/anthropic/types/beta/beta_base64_pdf_source_param.py
@@ -0,0 +1,23 @@
+# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
+
+from __future__ import annotations
+
+from typing import Union
+from typing_extensions import Literal, Required, Annotated, TypedDict
+
+from ..._types import Base64FileInput
+from ..._utils import PropertyInfo
+from ..._models import set_pydantic_config
+
+__all__ = ["BetaBase64PDFSourceParam"]
+
+
+class BetaBase64PDFSourceParam(TypedDict, total=False):
+ data: Required[Annotated[Union[str, Base64FileInput], PropertyInfo(format="base64")]]
+
+ media_type: Required[Literal["application/pdf"]]
+
+ type: Required[Literal["base64"]]
+
+
+set_pydantic_config(BetaBase64PDFSourceParam, {"arbitrary_types_allowed": True})
diff --git a/src/anthropic/types/beta/beta_content_block_param.py b/src/anthropic/types/beta/beta_content_block_param.py
index c1ef6b5f..0cf1b7c7 100644
--- a/src/anthropic/types/beta/beta_content_block_param.py
+++ b/src/anthropic/types/beta/beta_content_block_param.py
@@ -8,10 +8,11 @@
from .beta_text_block_param import BetaTextBlockParam
from .beta_image_block_param import BetaImageBlockParam
from .beta_tool_use_block_param import BetaToolUseBlockParam
+from .beta_base64_pdf_block_param import BetaBase64PDFBlockParam
from .beta_tool_result_block_param import BetaToolResultBlockParam
__all__ = ["BetaContentBlockParam"]
BetaContentBlockParam: TypeAlias = Union[
- BetaTextBlockParam, BetaImageBlockParam, BetaToolUseBlockParam, BetaToolResultBlockParam
+ BetaTextBlockParam, BetaImageBlockParam, BetaToolUseBlockParam, BetaToolResultBlockParam, BetaBase64PDFBlockParam
]
diff --git a/src/anthropic/types/beta/beta_message_tokens_count.py b/src/anthropic/types/beta/beta_message_tokens_count.py
new file mode 100644
index 00000000..e11daee7
--- /dev/null
+++ b/src/anthropic/types/beta/beta_message_tokens_count.py
@@ -0,0 +1,14 @@
+# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
+
+
+from ..._models import BaseModel
+
+__all__ = ["BetaMessageTokensCount"]
+
+
+class BetaMessageTokensCount(BaseModel):
+ input_tokens: int
+ """
+ The total number of tokens across the provided list of messages, system prompt,
+ and tools.
+ """
diff --git a/src/anthropic/types/beta/beta_tool_bash_20241022_param.py b/src/anthropic/types/beta/beta_tool_bash_20241022_param.py
index a76dd5b1..82ed02b3 100644
--- a/src/anthropic/types/beta/beta_tool_bash_20241022_param.py
+++ b/src/anthropic/types/beta/beta_tool_bash_20241022_param.py
@@ -12,6 +12,10 @@
class BetaToolBash20241022Param(TypedDict, total=False):
name: Required[Literal["bash"]]
+ """Name of the tool.
+
+ This is how the tool will be called by the model and in tool_use blocks.
+ """
type: Required[Literal["bash_20241022"]]
diff --git a/src/anthropic/types/beta/beta_tool_computer_use_20241022_param.py b/src/anthropic/types/beta/beta_tool_computer_use_20241022_param.py
index b38c4b81..b95472be 100644
--- a/src/anthropic/types/beta/beta_tool_computer_use_20241022_param.py
+++ b/src/anthropic/types/beta/beta_tool_computer_use_20241022_param.py
@@ -12,13 +12,20 @@
class BetaToolComputerUse20241022Param(TypedDict, total=False):
display_height_px: Required[int]
+ """The height of the display in pixels."""
display_width_px: Required[int]
+ """The width of the display in pixels."""
name: Required[Literal["computer"]]
+ """Name of the tool.
+
+ This is how the tool will be called by the model and in tool_use blocks.
+ """
type: Required[Literal["computer_20241022"]]
cache_control: Optional[BetaCacheControlEphemeralParam]
display_number: Optional[int]
+ """The X11 display number (e.g. 0, 1) for the display."""
diff --git a/src/anthropic/types/beta/beta_tool_param.py b/src/anthropic/types/beta/beta_tool_param.py
index b81260ca..a217ea6b 100644
--- a/src/anthropic/types/beta/beta_tool_param.py
+++ b/src/anthropic/types/beta/beta_tool_param.py
@@ -28,6 +28,10 @@ class BetaToolParam(TypedDict, total=False):
"""
name: Required[str]
+ """Name of the tool.
+
+ This is how the tool will be called by the model and in tool_use blocks.
+ """
cache_control: Optional[BetaCacheControlEphemeralParam]
diff --git a/src/anthropic/types/beta/beta_tool_text_editor_20241022_param.py b/src/anthropic/types/beta/beta_tool_text_editor_20241022_param.py
index 06a1aa23..86c93278 100644
--- a/src/anthropic/types/beta/beta_tool_text_editor_20241022_param.py
+++ b/src/anthropic/types/beta/beta_tool_text_editor_20241022_param.py
@@ -12,6 +12,10 @@
class BetaToolTextEditor20241022Param(TypedDict, total=False):
name: Required[Literal["str_replace_editor"]]
+ """Name of the tool.
+
+ This is how the tool will be called by the model and in tool_use blocks.
+ """
type: Required[Literal["text_editor_20241022"]]
diff --git a/src/anthropic/types/beta/message_count_tokens_params.py b/src/anthropic/types/beta/message_count_tokens_params.py
new file mode 100644
index 00000000..4c864ac2
--- /dev/null
+++ b/src/anthropic/types/beta/message_count_tokens_params.py
@@ -0,0 +1,210 @@
+# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
+
+from __future__ import annotations
+
+from typing import List, Union, Iterable
+from typing_extensions import Required, Annotated, TypeAlias, TypedDict
+
+from ..._utils import PropertyInfo
+from ..model_param import ModelParam
+from .beta_tool_param import BetaToolParam
+from .beta_message_param import BetaMessageParam
+from ..anthropic_beta_param import AnthropicBetaParam
+from .beta_text_block_param import BetaTextBlockParam
+from .beta_tool_choice_param import BetaToolChoiceParam
+from .beta_tool_bash_20241022_param import BetaToolBash20241022Param
+from .beta_tool_text_editor_20241022_param import BetaToolTextEditor20241022Param
+from .beta_tool_computer_use_20241022_param import BetaToolComputerUse20241022Param
+
+__all__ = ["MessageCountTokensParams", "Tool"]
+
+
+class MessageCountTokensParams(TypedDict, total=False):
+ messages: Required[Iterable[BetaMessageParam]]
+ """Input messages.
+
+ Our models are trained to operate on alternating `user` and `assistant`
+ conversational turns. When creating a new `Message`, you specify the prior
+ conversational turns with the `messages` parameter, and the model then generates
+ the next `Message` in the conversation. Consecutive `user` or `assistant` turns
+ in your request will be combined into a single turn.
+
+ Each input message must be an object with a `role` and `content`. You can
+ specify a single `user`-role message, or you can include multiple `user` and
+ `assistant` messages.
+
+ If the final message uses the `assistant` role, the response content will
+ continue immediately from the content in that message. This can be used to
+ constrain part of the model's response.
+
+ Example with a single `user` message:
+
+ ```json
+ [{ "role": "user", "content": "Hello, Claude" }]
+ ```
+
+ Example with multiple conversational turns:
+
+ ```json
+ [
+ { "role": "user", "content": "Hello there." },
+ { "role": "assistant", "content": "Hi, I'm Claude. How can I help you?" },
+ { "role": "user", "content": "Can you explain LLMs in plain English?" }
+ ]
+ ```
+
+ Example with a partially-filled response from Claude:
+
+ ```json
+ [
+ {
+ "role": "user",
+ "content": "What's the Greek name for Sun? (A) Sol (B) Helios (C) Sun"
+ },
+ { "role": "assistant", "content": "The best answer is (" }
+ ]
+ ```
+
+ Each input message `content` may be either a single `string` or an array of
+ content blocks, where each block has a specific `type`. Using a `string` for
+ `content` is shorthand for an array of one content block of type `"text"`. The
+ following input messages are equivalent:
+
+ ```json
+ { "role": "user", "content": "Hello, Claude" }
+ ```
+
+ ```json
+ { "role": "user", "content": [{ "type": "text", "text": "Hello, Claude" }] }
+ ```
+
+ Starting with Claude 3 models, you can also send image content blocks:
+
+ ```json
+ {
+ "role": "user",
+ "content": [
+ {
+ "type": "image",
+ "source": {
+ "type": "base64",
+ "media_type": "image/jpeg",
+ "data": "/9j/4AAQSkZJRg..."
+ }
+ },
+ { "type": "text", "text": "What is in this image?" }
+ ]
+ }
+ ```
+
+ We currently support the `base64` source type for images, and the `image/jpeg`,
+ `image/png`, `image/gif`, and `image/webp` media types.
+
+ See [examples](https://docs.anthropic.com/en/api/messages-examples#vision) for
+ more input examples.
+
+ Note that if you want to include a
+ [system prompt](https://docs.anthropic.com/en/docs/system-prompts), you can use
+ the top-level `system` parameter — there is no `"system"` role for input
+ messages in the Messages API.
+ """
+
+ model: Required[ModelParam]
+ """
+    The model that will complete your prompt. See
+ [models](https://docs.anthropic.com/en/docs/models-overview) for additional
+ details and options.
+ """
+
+ system: Union[str, Iterable[BetaTextBlockParam]]
+ """System prompt.
+
+ A system prompt is a way of providing context and instructions to Claude, such
+ as specifying a particular goal or role. See our
+ [guide to system prompts](https://docs.anthropic.com/en/docs/system-prompts).
+ """
+
+ tool_choice: BetaToolChoiceParam
+ """How the model should use the provided tools.
+
+ The model can use a specific tool, any available tool, or decide by itself.
+ """
+
+ tools: Iterable[Tool]
+ """Definitions of tools that the model may use.
+
+ If you include `tools` in your API request, the model may return `tool_use`
+ content blocks that represent the model's use of those tools. You can then run
+ those tools using the tool input generated by the model and then optionally
+ return results back to the model using `tool_result` content blocks.
+
+ Each tool definition includes:
+
+ - `name`: Name of the tool.
+ - `description`: Optional, but strongly-recommended description of the tool.
+ - `input_schema`: [JSON schema](https://json-schema.org/) for the tool `input`
+ shape that the model will produce in `tool_use` output content blocks.
+
+ For example, if you defined `tools` as:
+
+ ```json
+ [
+ {
+ "name": "get_stock_price",
+ "description": "Get the current stock price for a given ticker symbol.",
+ "input_schema": {
+ "type": "object",
+ "properties": {
+ "ticker": {
+ "type": "string",
+ "description": "The stock ticker symbol, e.g. AAPL for Apple Inc."
+ }
+ },
+ "required": ["ticker"]
+ }
+ }
+ ]
+ ```
+
+ And then asked the model "What's the S&P 500 at today?", the model might produce
+ `tool_use` content blocks in the response like this:
+
+ ```json
+ [
+ {
+ "type": "tool_use",
+ "id": "toolu_01D7FLrfh4GYq7yT1ULFeyMV",
+ "name": "get_stock_price",
+ "input": { "ticker": "^GSPC" }
+ }
+ ]
+ ```
+
+ You might then run your `get_stock_price` tool with `{"ticker": "^GSPC"}` as an
+ input, and return the following back to the model in a subsequent `user`
+ message:
+
+ ```json
+ [
+ {
+ "type": "tool_result",
+ "tool_use_id": "toolu_01D7FLrfh4GYq7yT1ULFeyMV",
+ "content": "259.75 USD"
+ }
+ ]
+ ```
+
+ Tools can be used for workflows that include running client-side tools and
+ functions, or more generally whenever you want the model to produce a particular
+ JSON structure of output.
+
+ See our [guide](https://docs.anthropic.com/en/docs/tool-use) for more details.
+ """
+
+ betas: Annotated[List[AnthropicBetaParam], PropertyInfo(alias="anthropic-beta")]
+ """Optional header to specify the beta version(s) you want to use."""
+
+
+Tool: TypeAlias = Union[
+ BetaToolParam, BetaToolComputerUse20241022Param, BetaToolBash20241022Param, BetaToolTextEditor20241022Param
+]
diff --git a/src/anthropic/types/beta/prompt_caching/prompt_caching_beta_tool_param.py b/src/anthropic/types/beta/prompt_caching/prompt_caching_beta_tool_param.py
index f9feb37f..cfd9f8aa 100644
--- a/src/anthropic/types/beta/prompt_caching/prompt_caching_beta_tool_param.py
+++ b/src/anthropic/types/beta/prompt_caching/prompt_caching_beta_tool_param.py
@@ -28,6 +28,10 @@ class PromptCachingBetaToolParam(TypedDict, total=False):
"""
name: Required[str]
+ """Name of the tool.
+
+ This is how the tool will be called by the model and in tool_use blocks.
+ """
cache_control: Optional[PromptCachingBetaCacheControlEphemeralParam]
diff --git a/src/anthropic/types/tool_param.py b/src/anthropic/types/tool_param.py
index ef7441e6..35a95516 100644
--- a/src/anthropic/types/tool_param.py
+++ b/src/anthropic/types/tool_param.py
@@ -26,6 +26,10 @@ class ToolParam(TypedDict, total=False):
"""
name: Required[str]
+ """Name of the tool.
+
+ This is how the tool will be called by the model and in tool_use blocks.
+ """
description: str
"""Description of what this tool does.
diff --git a/tests/api_resources/beta/messages/test_batches.py b/tests/api_resources/beta/messages/test_batches.py
index 3ec2f8b7..12c5cae0 100644
--- a/tests/api_resources/beta/messages/test_batches.py
+++ b/tests/api_resources/beta/messages/test_batches.py
@@ -37,7 +37,7 @@ def test_method_create(self, client: Anthropic) -> None:
"role": "user",
}
],
- "model": "claude-3-5-sonnet-20240620",
+ "model": "claude-3-5-sonnet-20241022",
},
},
{
@@ -50,7 +50,7 @@ def test_method_create(self, client: Anthropic) -> None:
"role": "user",
}
],
- "model": "claude-3-5-sonnet-20240620",
+ "model": "claude-3-5-sonnet-20241022",
},
},
{
@@ -63,7 +63,7 @@ def test_method_create(self, client: Anthropic) -> None:
"role": "user",
}
],
- "model": "claude-3-5-sonnet-20240620",
+ "model": "claude-3-5-sonnet-20241022",
},
},
],
@@ -84,7 +84,7 @@ def test_method_create_with_all_params(self, client: Anthropic) -> None:
"role": "user",
}
],
- "model": "claude-3-5-sonnet-20240620",
+ "model": "claude-3-5-sonnet-20241022",
"metadata": {"user_id": "13803d75-b4b5-4c3e-b2a2-6f21399b021b"},
"stop_sequences": ["string", "string", "string"],
"stream": False,
@@ -173,7 +173,7 @@ def test_method_create_with_all_params(self, client: Anthropic) -> None:
"role": "user",
}
],
- "model": "claude-3-5-sonnet-20240620",
+ "model": "claude-3-5-sonnet-20241022",
"metadata": {"user_id": "13803d75-b4b5-4c3e-b2a2-6f21399b021b"},
"stop_sequences": ["string", "string", "string"],
"stream": False,
@@ -262,7 +262,7 @@ def test_method_create_with_all_params(self, client: Anthropic) -> None:
"role": "user",
}
],
- "model": "claude-3-5-sonnet-20240620",
+ "model": "claude-3-5-sonnet-20241022",
"metadata": {"user_id": "13803d75-b4b5-4c3e-b2a2-6f21399b021b"},
"stop_sequences": ["string", "string", "string"],
"stream": False,
@@ -360,7 +360,7 @@ def test_raw_response_create(self, client: Anthropic) -> None:
"role": "user",
}
],
- "model": "claude-3-5-sonnet-20240620",
+ "model": "claude-3-5-sonnet-20241022",
},
},
{
@@ -373,7 +373,7 @@ def test_raw_response_create(self, client: Anthropic) -> None:
"role": "user",
}
],
- "model": "claude-3-5-sonnet-20240620",
+ "model": "claude-3-5-sonnet-20241022",
},
},
{
@@ -386,7 +386,7 @@ def test_raw_response_create(self, client: Anthropic) -> None:
"role": "user",
}
],
- "model": "claude-3-5-sonnet-20240620",
+ "model": "claude-3-5-sonnet-20241022",
},
},
],
@@ -411,7 +411,7 @@ def test_streaming_response_create(self, client: Anthropic) -> None:
"role": "user",
}
],
- "model": "claude-3-5-sonnet-20240620",
+ "model": "claude-3-5-sonnet-20241022",
},
},
{
@@ -424,7 +424,7 @@ def test_streaming_response_create(self, client: Anthropic) -> None:
"role": "user",
}
],
- "model": "claude-3-5-sonnet-20240620",
+ "model": "claude-3-5-sonnet-20241022",
},
},
{
@@ -437,7 +437,7 @@ def test_streaming_response_create(self, client: Anthropic) -> None:
"role": "user",
}
],
- "model": "claude-3-5-sonnet-20240620",
+ "model": "claude-3-5-sonnet-20241022",
},
},
],
@@ -634,7 +634,7 @@ async def test_method_create(self, async_client: AsyncAnthropic) -> None:
"role": "user",
}
],
- "model": "claude-3-5-sonnet-20240620",
+ "model": "claude-3-5-sonnet-20241022",
},
},
{
@@ -647,7 +647,7 @@ async def test_method_create(self, async_client: AsyncAnthropic) -> None:
"role": "user",
}
],
- "model": "claude-3-5-sonnet-20240620",
+ "model": "claude-3-5-sonnet-20241022",
},
},
{
@@ -660,7 +660,7 @@ async def test_method_create(self, async_client: AsyncAnthropic) -> None:
"role": "user",
}
],
- "model": "claude-3-5-sonnet-20240620",
+ "model": "claude-3-5-sonnet-20241022",
},
},
],
@@ -681,7 +681,7 @@ async def test_method_create_with_all_params(self, async_client: AsyncAnthropic)
"role": "user",
}
],
- "model": "claude-3-5-sonnet-20240620",
+ "model": "claude-3-5-sonnet-20241022",
"metadata": {"user_id": "13803d75-b4b5-4c3e-b2a2-6f21399b021b"},
"stop_sequences": ["string", "string", "string"],
"stream": False,
@@ -770,7 +770,7 @@ async def test_method_create_with_all_params(self, async_client: AsyncAnthropic)
"role": "user",
}
],
- "model": "claude-3-5-sonnet-20240620",
+ "model": "claude-3-5-sonnet-20241022",
"metadata": {"user_id": "13803d75-b4b5-4c3e-b2a2-6f21399b021b"},
"stop_sequences": ["string", "string", "string"],
"stream": False,
@@ -859,7 +859,7 @@ async def test_method_create_with_all_params(self, async_client: AsyncAnthropic)
"role": "user",
}
],
- "model": "claude-3-5-sonnet-20240620",
+ "model": "claude-3-5-sonnet-20241022",
"metadata": {"user_id": "13803d75-b4b5-4c3e-b2a2-6f21399b021b"},
"stop_sequences": ["string", "string", "string"],
"stream": False,
@@ -957,7 +957,7 @@ async def test_raw_response_create(self, async_client: AsyncAnthropic) -> None:
"role": "user",
}
],
- "model": "claude-3-5-sonnet-20240620",
+ "model": "claude-3-5-sonnet-20241022",
},
},
{
@@ -970,7 +970,7 @@ async def test_raw_response_create(self, async_client: AsyncAnthropic) -> None:
"role": "user",
}
],
- "model": "claude-3-5-sonnet-20240620",
+ "model": "claude-3-5-sonnet-20241022",
},
},
{
@@ -983,7 +983,7 @@ async def test_raw_response_create(self, async_client: AsyncAnthropic) -> None:
"role": "user",
}
],
- "model": "claude-3-5-sonnet-20240620",
+ "model": "claude-3-5-sonnet-20241022",
},
},
],
@@ -1008,7 +1008,7 @@ async def test_streaming_response_create(self, async_client: AsyncAnthropic) ->
"role": "user",
}
],
- "model": "claude-3-5-sonnet-20240620",
+ "model": "claude-3-5-sonnet-20241022",
},
},
{
@@ -1021,7 +1021,7 @@ async def test_streaming_response_create(self, async_client: AsyncAnthropic) ->
"role": "user",
}
],
- "model": "claude-3-5-sonnet-20240620",
+ "model": "claude-3-5-sonnet-20241022",
},
},
{
@@ -1034,7 +1034,7 @@ async def test_streaming_response_create(self, async_client: AsyncAnthropic) ->
"role": "user",
}
],
- "model": "claude-3-5-sonnet-20240620",
+ "model": "claude-3-5-sonnet-20241022",
},
},
],
diff --git a/tests/api_resources/beta/prompt_caching/test_messages.py b/tests/api_resources/beta/prompt_caching/test_messages.py
index 857b0c28..edd8ecf3 100644
--- a/tests/api_resources/beta/prompt_caching/test_messages.py
+++ b/tests/api_resources/beta/prompt_caching/test_messages.py
@@ -27,7 +27,7 @@ def test_method_create_overload_1(self, client: Anthropic) -> None:
"role": "user",
}
],
- model="claude-3-5-sonnet-20240620",
+ model="claude-3-5-sonnet-20241022",
)
assert_matches_type(PromptCachingBetaMessage, message, path=["response"])
@@ -41,7 +41,7 @@ def test_method_create_with_all_params_overload_1(self, client: Anthropic) -> No
"role": "user",
}
],
- model="claude-3-5-sonnet-20240620",
+ model="claude-3-5-sonnet-20241022",
metadata={"user_id": "13803d75-b4b5-4c3e-b2a2-6f21399b021b"},
stop_sequences=["string", "string", "string"],
stream=False,
@@ -129,7 +129,7 @@ def test_raw_response_create_overload_1(self, client: Anthropic) -> None:
"role": "user",
}
],
- model="claude-3-5-sonnet-20240620",
+ model="claude-3-5-sonnet-20241022",
)
assert response.is_closed is True
@@ -147,7 +147,7 @@ def test_streaming_response_create_overload_1(self, client: Anthropic) -> None:
"role": "user",
}
],
- model="claude-3-5-sonnet-20240620",
+ model="claude-3-5-sonnet-20241022",
) as response:
assert not response.is_closed
assert response.http_request.headers.get("X-Stainless-Lang") == "python"
@@ -167,7 +167,7 @@ def test_method_create_overload_2(self, client: Anthropic) -> None:
"role": "user",
}
],
- model="claude-3-5-sonnet-20240620",
+ model="claude-3-5-sonnet-20241022",
stream=True,
)
message_stream.response.close()
@@ -182,7 +182,7 @@ def test_method_create_with_all_params_overload_2(self, client: Anthropic) -> No
"role": "user",
}
],
- model="claude-3-5-sonnet-20240620",
+ model="claude-3-5-sonnet-20241022",
stream=True,
metadata={"user_id": "13803d75-b4b5-4c3e-b2a2-6f21399b021b"},
stop_sequences=["string", "string", "string"],
@@ -270,7 +270,7 @@ def test_raw_response_create_overload_2(self, client: Anthropic) -> None:
"role": "user",
}
],
- model="claude-3-5-sonnet-20240620",
+ model="claude-3-5-sonnet-20241022",
stream=True,
)
@@ -288,7 +288,7 @@ def test_streaming_response_create_overload_2(self, client: Anthropic) -> None:
"role": "user",
}
],
- model="claude-3-5-sonnet-20240620",
+ model="claude-3-5-sonnet-20241022",
stream=True,
) as response:
assert not response.is_closed
@@ -313,7 +313,7 @@ async def test_method_create_overload_1(self, async_client: AsyncAnthropic) -> N
"role": "user",
}
],
- model="claude-3-5-sonnet-20240620",
+ model="claude-3-5-sonnet-20241022",
)
assert_matches_type(PromptCachingBetaMessage, message, path=["response"])
@@ -327,7 +327,7 @@ async def test_method_create_with_all_params_overload_1(self, async_client: Asyn
"role": "user",
}
],
- model="claude-3-5-sonnet-20240620",
+ model="claude-3-5-sonnet-20241022",
metadata={"user_id": "13803d75-b4b5-4c3e-b2a2-6f21399b021b"},
stop_sequences=["string", "string", "string"],
stream=False,
@@ -415,7 +415,7 @@ async def test_raw_response_create_overload_1(self, async_client: AsyncAnthropic
"role": "user",
}
],
- model="claude-3-5-sonnet-20240620",
+ model="claude-3-5-sonnet-20241022",
)
assert response.is_closed is True
@@ -433,7 +433,7 @@ async def test_streaming_response_create_overload_1(self, async_client: AsyncAnt
"role": "user",
}
],
- model="claude-3-5-sonnet-20240620",
+ model="claude-3-5-sonnet-20241022",
) as response:
assert not response.is_closed
assert response.http_request.headers.get("X-Stainless-Lang") == "python"
@@ -453,7 +453,7 @@ async def test_method_create_overload_2(self, async_client: AsyncAnthropic) -> N
"role": "user",
}
],
- model="claude-3-5-sonnet-20240620",
+ model="claude-3-5-sonnet-20241022",
stream=True,
)
await message_stream.response.aclose()
@@ -468,7 +468,7 @@ async def test_method_create_with_all_params_overload_2(self, async_client: Asyn
"role": "user",
}
],
- model="claude-3-5-sonnet-20240620",
+ model="claude-3-5-sonnet-20241022",
stream=True,
metadata={"user_id": "13803d75-b4b5-4c3e-b2a2-6f21399b021b"},
stop_sequences=["string", "string", "string"],
@@ -556,7 +556,7 @@ async def test_raw_response_create_overload_2(self, async_client: AsyncAnthropic
"role": "user",
}
],
- model="claude-3-5-sonnet-20240620",
+ model="claude-3-5-sonnet-20241022",
stream=True,
)
@@ -574,7 +574,7 @@ async def test_streaming_response_create_overload_2(self, async_client: AsyncAnt
"role": "user",
}
],
- model="claude-3-5-sonnet-20240620",
+ model="claude-3-5-sonnet-20241022",
stream=True,
) as response:
assert not response.is_closed
diff --git a/tests/api_resources/beta/test_messages.py b/tests/api_resources/beta/test_messages.py
index 7fc29be4..17c82308 100644
--- a/tests/api_resources/beta/test_messages.py
+++ b/tests/api_resources/beta/test_messages.py
@@ -9,7 +9,10 @@
from anthropic import Anthropic, AsyncAnthropic
from tests.utils import assert_matches_type
-from anthropic.types.beta import BetaMessage
+from anthropic.types.beta import (
+ BetaMessage,
+ BetaMessageTokensCount,
+)
base_url = os.environ.get("TEST_API_BASE_URL", "http://127.0.0.1:4010")
@@ -27,7 +30,7 @@ def test_method_create_overload_1(self, client: Anthropic) -> None:
"role": "user",
}
],
- model="claude-3-5-sonnet-20240620",
+ model="claude-3-5-sonnet-20241022",
)
assert_matches_type(BetaMessage, message, path=["response"])
@@ -41,7 +44,7 @@ def test_method_create_with_all_params_overload_1(self, client: Anthropic) -> No
"role": "user",
}
],
- model="claude-3-5-sonnet-20240620",
+ model="claude-3-5-sonnet-20241022",
metadata={"user_id": "13803d75-b4b5-4c3e-b2a2-6f21399b021b"},
stop_sequences=["string", "string", "string"],
stream=False,
@@ -132,7 +135,7 @@ def test_raw_response_create_overload_1(self, client: Anthropic) -> None:
"role": "user",
}
],
- model="claude-3-5-sonnet-20240620",
+ model="claude-3-5-sonnet-20241022",
)
assert response.is_closed is True
@@ -150,7 +153,7 @@ def test_streaming_response_create_overload_1(self, client: Anthropic) -> None:
"role": "user",
}
],
- model="claude-3-5-sonnet-20240620",
+ model="claude-3-5-sonnet-20241022",
) as response:
assert not response.is_closed
assert response.http_request.headers.get("X-Stainless-Lang") == "python"
@@ -170,7 +173,7 @@ def test_method_create_overload_2(self, client: Anthropic) -> None:
"role": "user",
}
],
- model="claude-3-5-sonnet-20240620",
+ model="claude-3-5-sonnet-20241022",
stream=True,
)
message_stream.response.close()
@@ -185,7 +188,7 @@ def test_method_create_with_all_params_overload_2(self, client: Anthropic) -> No
"role": "user",
}
],
- model="claude-3-5-sonnet-20240620",
+ model="claude-3-5-sonnet-20241022",
stream=True,
metadata={"user_id": "13803d75-b4b5-4c3e-b2a2-6f21399b021b"},
stop_sequences=["string", "string", "string"],
@@ -276,7 +279,7 @@ def test_raw_response_create_overload_2(self, client: Anthropic) -> None:
"role": "user",
}
],
- model="claude-3-5-sonnet-20240620",
+ model="claude-3-5-sonnet-20241022",
stream=True,
)
@@ -294,7 +297,7 @@ def test_streaming_response_create_overload_2(self, client: Anthropic) -> None:
"role": "user",
}
],
- model="claude-3-5-sonnet-20240620",
+ model="claude-3-5-sonnet-20241022",
stream=True,
) as response:
assert not response.is_closed
@@ -305,6 +308,171 @@ def test_streaming_response_create_overload_2(self, client: Anthropic) -> None:
assert cast(Any, response.is_closed) is True
+ @parametrize
+ def test_method_count_tokens(self, client: Anthropic) -> None:
+ message = client.beta.messages.count_tokens(
+ messages=[
+ {
+ "content": "string",
+ "role": "user",
+ },
+ {
+ "content": "string",
+ "role": "user",
+ },
+ {
+ "content": "string",
+ "role": "user",
+ },
+ ],
+ model="string",
+ )
+ assert_matches_type(BetaMessageTokensCount, message, path=["response"])
+
+ @parametrize
+ def test_method_count_tokens_with_all_params(self, client: Anthropic) -> None:
+ message = client.beta.messages.count_tokens(
+ messages=[
+ {
+ "content": "string",
+ "role": "user",
+ },
+ {
+ "content": "string",
+ "role": "user",
+ },
+ {
+ "content": "string",
+ "role": "user",
+ },
+ ],
+ model="string",
+ system=[
+ {
+ "text": "Today's date is 2024-06-01.",
+ "type": "text",
+ "cache_control": {"type": "ephemeral"},
+ }
+ ],
+ tool_choice={
+ "type": "auto",
+ "disable_parallel_tool_use": True,
+ },
+ tools=[
+ {
+ "input_schema": {
+ "type": "object",
+ "properties": {
+ "location": {
+ "description": "The city and state, e.g. San Francisco, CA",
+ "type": "string",
+ },
+ "unit": {
+ "description": "Unit for the output - one of (celsius, fahrenheit)",
+ "type": "string",
+ },
+ },
+ },
+ "name": "x",
+ "cache_control": {"type": "ephemeral"},
+ "description": "Get the current weather in a given location",
+ "type": "custom",
+ },
+ {
+ "input_schema": {
+ "type": "object",
+ "properties": {
+ "location": {
+ "description": "The city and state, e.g. San Francisco, CA",
+ "type": "string",
+ },
+ "unit": {
+ "description": "Unit for the output - one of (celsius, fahrenheit)",
+ "type": "string",
+ },
+ },
+ },
+ "name": "x",
+ "cache_control": {"type": "ephemeral"},
+ "description": "Get the current weather in a given location",
+ "type": "custom",
+ },
+ {
+ "input_schema": {
+ "type": "object",
+ "properties": {
+ "location": {
+ "description": "The city and state, e.g. San Francisco, CA",
+ "type": "string",
+ },
+ "unit": {
+ "description": "Unit for the output - one of (celsius, fahrenheit)",
+ "type": "string",
+ },
+ },
+ },
+ "name": "x",
+ "cache_control": {"type": "ephemeral"},
+ "description": "Get the current weather in a given location",
+ "type": "custom",
+ },
+ ],
+ betas=["string", "string", "string"],
+ )
+ assert_matches_type(BetaMessageTokensCount, message, path=["response"])
+
+ @parametrize
+ def test_raw_response_count_tokens(self, client: Anthropic) -> None:
+ response = client.beta.messages.with_raw_response.count_tokens(
+ messages=[
+ {
+ "content": "string",
+ "role": "user",
+ },
+ {
+ "content": "string",
+ "role": "user",
+ },
+ {
+ "content": "string",
+ "role": "user",
+ },
+ ],
+ model="string",
+ )
+
+ assert response.is_closed is True
+ assert response.http_request.headers.get("X-Stainless-Lang") == "python"
+ message = response.parse()
+ assert_matches_type(BetaMessageTokensCount, message, path=["response"])
+
+ @parametrize
+ def test_streaming_response_count_tokens(self, client: Anthropic) -> None:
+ with client.beta.messages.with_streaming_response.count_tokens(
+ messages=[
+ {
+ "content": "string",
+ "role": "user",
+ },
+ {
+ "content": "string",
+ "role": "user",
+ },
+ {
+ "content": "string",
+ "role": "user",
+ },
+ ],
+ model="string",
+ ) as response:
+ assert not response.is_closed
+ assert response.http_request.headers.get("X-Stainless-Lang") == "python"
+
+ message = response.parse()
+ assert_matches_type(BetaMessageTokensCount, message, path=["response"])
+
+ assert cast(Any, response.is_closed) is True
+
class TestAsyncMessages:
parametrize = pytest.mark.parametrize("async_client", [False, True], indirect=True, ids=["loose", "strict"])
@@ -319,7 +487,7 @@ async def test_method_create_overload_1(self, async_client: AsyncAnthropic) -> N
"role": "user",
}
],
- model="claude-3-5-sonnet-20240620",
+ model="claude-3-5-sonnet-20241022",
)
assert_matches_type(BetaMessage, message, path=["response"])
@@ -333,7 +501,7 @@ async def test_method_create_with_all_params_overload_1(self, async_client: Asyn
"role": "user",
}
],
- model="claude-3-5-sonnet-20240620",
+ model="claude-3-5-sonnet-20241022",
metadata={"user_id": "13803d75-b4b5-4c3e-b2a2-6f21399b021b"},
stop_sequences=["string", "string", "string"],
stream=False,
@@ -424,7 +592,7 @@ async def test_raw_response_create_overload_1(self, async_client: AsyncAnthropic
"role": "user",
}
],
- model="claude-3-5-sonnet-20240620",
+ model="claude-3-5-sonnet-20241022",
)
assert response.is_closed is True
@@ -442,7 +610,7 @@ async def test_streaming_response_create_overload_1(self, async_client: AsyncAnt
"role": "user",
}
],
- model="claude-3-5-sonnet-20240620",
+ model="claude-3-5-sonnet-20241022",
) as response:
assert not response.is_closed
assert response.http_request.headers.get("X-Stainless-Lang") == "python"
@@ -462,7 +630,7 @@ async def test_method_create_overload_2(self, async_client: AsyncAnthropic) -> N
"role": "user",
}
],
- model="claude-3-5-sonnet-20240620",
+ model="claude-3-5-sonnet-20241022",
stream=True,
)
await message_stream.response.aclose()
@@ -477,7 +645,7 @@ async def test_method_create_with_all_params_overload_2(self, async_client: Asyn
"role": "user",
}
],
- model="claude-3-5-sonnet-20240620",
+ model="claude-3-5-sonnet-20241022",
stream=True,
metadata={"user_id": "13803d75-b4b5-4c3e-b2a2-6f21399b021b"},
stop_sequences=["string", "string", "string"],
@@ -568,7 +736,7 @@ async def test_raw_response_create_overload_2(self, async_client: AsyncAnthropic
"role": "user",
}
],
- model="claude-3-5-sonnet-20240620",
+ model="claude-3-5-sonnet-20241022",
stream=True,
)
@@ -586,7 +754,7 @@ async def test_streaming_response_create_overload_2(self, async_client: AsyncAnt
"role": "user",
}
],
- model="claude-3-5-sonnet-20240620",
+ model="claude-3-5-sonnet-20241022",
stream=True,
) as response:
assert not response.is_closed
@@ -596,3 +764,168 @@ async def test_streaming_response_create_overload_2(self, async_client: AsyncAnt
await stream.close()
assert cast(Any, response.is_closed) is True
+
+ @parametrize
+ async def test_method_count_tokens(self, async_client: AsyncAnthropic) -> None:
+ message = await async_client.beta.messages.count_tokens(
+ messages=[
+ {
+ "content": "string",
+ "role": "user",
+ },
+ {
+ "content": "string",
+ "role": "user",
+ },
+ {
+ "content": "string",
+ "role": "user",
+ },
+ ],
+ model="string",
+ )
+ assert_matches_type(BetaMessageTokensCount, message, path=["response"])
+
+ @parametrize
+ async def test_method_count_tokens_with_all_params(self, async_client: AsyncAnthropic) -> None:
+ message = await async_client.beta.messages.count_tokens(
+ messages=[
+ {
+ "content": "string",
+ "role": "user",
+ },
+ {
+ "content": "string",
+ "role": "user",
+ },
+ {
+ "content": "string",
+ "role": "user",
+ },
+ ],
+ model="string",
+ system=[
+ {
+ "text": "Today's date is 2024-06-01.",
+ "type": "text",
+ "cache_control": {"type": "ephemeral"},
+ }
+ ],
+ tool_choice={
+ "type": "auto",
+ "disable_parallel_tool_use": True,
+ },
+ tools=[
+ {
+ "input_schema": {
+ "type": "object",
+ "properties": {
+ "location": {
+ "description": "The city and state, e.g. San Francisco, CA",
+ "type": "string",
+ },
+ "unit": {
+ "description": "Unit for the output - one of (celsius, fahrenheit)",
+ "type": "string",
+ },
+ },
+ },
+ "name": "x",
+ "cache_control": {"type": "ephemeral"},
+ "description": "Get the current weather in a given location",
+ "type": "custom",
+ },
+ {
+ "input_schema": {
+ "type": "object",
+ "properties": {
+ "location": {
+ "description": "The city and state, e.g. San Francisco, CA",
+ "type": "string",
+ },
+ "unit": {
+ "description": "Unit for the output - one of (celsius, fahrenheit)",
+ "type": "string",
+ },
+ },
+ },
+ "name": "x",
+ "cache_control": {"type": "ephemeral"},
+ "description": "Get the current weather in a given location",
+ "type": "custom",
+ },
+ {
+ "input_schema": {
+ "type": "object",
+ "properties": {
+ "location": {
+ "description": "The city and state, e.g. San Francisco, CA",
+ "type": "string",
+ },
+ "unit": {
+ "description": "Unit for the output - one of (celsius, fahrenheit)",
+ "type": "string",
+ },
+ },
+ },
+ "name": "x",
+ "cache_control": {"type": "ephemeral"},
+ "description": "Get the current weather in a given location",
+ "type": "custom",
+ },
+ ],
+ betas=["string", "string", "string"],
+ )
+ assert_matches_type(BetaMessageTokensCount, message, path=["response"])
+
+ @parametrize
+ async def test_raw_response_count_tokens(self, async_client: AsyncAnthropic) -> None:
+ response = await async_client.beta.messages.with_raw_response.count_tokens(
+ messages=[
+ {
+ "content": "string",
+ "role": "user",
+ },
+ {
+ "content": "string",
+ "role": "user",
+ },
+ {
+ "content": "string",
+ "role": "user",
+ },
+ ],
+ model="string",
+ )
+
+ assert response.is_closed is True
+ assert response.http_request.headers.get("X-Stainless-Lang") == "python"
+ message = response.parse()
+ assert_matches_type(BetaMessageTokensCount, message, path=["response"])
+
+ @parametrize
+ async def test_streaming_response_count_tokens(self, async_client: AsyncAnthropic) -> None:
+ async with async_client.beta.messages.with_streaming_response.count_tokens(
+ messages=[
+ {
+ "content": "string",
+ "role": "user",
+ },
+ {
+ "content": "string",
+ "role": "user",
+ },
+ {
+ "content": "string",
+ "role": "user",
+ },
+ ],
+ model="string",
+ ) as response:
+ assert not response.is_closed
+ assert response.http_request.headers.get("X-Stainless-Lang") == "python"
+
+ message = await response.parse()
+ assert_matches_type(BetaMessageTokensCount, message, path=["response"])
+
+ assert cast(Any, response.is_closed) is True
diff --git a/tests/api_resources/test_messages.py b/tests/api_resources/test_messages.py
index 642e3ee9..5219f7c4 100644
--- a/tests/api_resources/test_messages.py
+++ b/tests/api_resources/test_messages.py
@@ -28,7 +28,7 @@ def test_method_create_overload_1(self, client: Anthropic) -> None:
"role": "user",
}
],
- model="claude-3-5-sonnet-20240620",
+ model="claude-3-5-sonnet-20241022",
)
assert_matches_type(Message, message, path=["response"])
@@ -42,7 +42,7 @@ def test_method_create_with_all_params_overload_1(self, client: Anthropic) -> No
"role": "user",
}
],
- model="claude-3-5-sonnet-20240620",
+ model="claude-3-5-sonnet-20241022",
metadata={"user_id": "13803d75-b4b5-4c3e-b2a2-6f21399b021b"},
stop_sequences=["string", "string", "string"],
stream=False,
@@ -125,7 +125,7 @@ def test_raw_response_create_overload_1(self, client: Anthropic) -> None:
"role": "user",
}
],
- model="claude-3-5-sonnet-20240620",
+ model="claude-3-5-sonnet-20241022",
)
assert response.is_closed is True
@@ -143,7 +143,7 @@ def test_streaming_response_create_overload_1(self, client: Anthropic) -> None:
"role": "user",
}
],
- model="claude-3-5-sonnet-20240620",
+ model="claude-3-5-sonnet-20241022",
) as response:
assert not response.is_closed
assert response.http_request.headers.get("X-Stainless-Lang") == "python"
@@ -163,7 +163,7 @@ def test_method_create_overload_2(self, client: Anthropic) -> None:
"role": "user",
}
],
- model="claude-3-5-sonnet-20240620",
+ model="claude-3-5-sonnet-20241022",
stream=True,
)
message_stream.response.close()
@@ -178,7 +178,7 @@ def test_method_create_with_all_params_overload_2(self, client: Anthropic) -> No
"role": "user",
}
],
- model="claude-3-5-sonnet-20240620",
+ model="claude-3-5-sonnet-20241022",
stream=True,
metadata={"user_id": "13803d75-b4b5-4c3e-b2a2-6f21399b021b"},
stop_sequences=["string", "string", "string"],
@@ -261,7 +261,7 @@ def test_raw_response_create_overload_2(self, client: Anthropic) -> None:
"role": "user",
}
],
- model="claude-3-5-sonnet-20240620",
+ model="claude-3-5-sonnet-20241022",
stream=True,
)
@@ -279,7 +279,7 @@ def test_streaming_response_create_overload_2(self, client: Anthropic) -> None:
"role": "user",
}
],
- model="claude-3-5-sonnet-20240620",
+ model="claude-3-5-sonnet-20241022",
stream=True,
) as response:
assert not response.is_closed
@@ -314,7 +314,7 @@ async def test_method_create_overload_1(self, async_client: AsyncAnthropic) -> N
"role": "user",
}
],
- model="claude-3-5-sonnet-20240620",
+ model="claude-3-5-sonnet-20241022",
)
assert_matches_type(Message, message, path=["response"])
@@ -328,7 +328,7 @@ async def test_method_create_with_all_params_overload_1(self, async_client: Asyn
"role": "user",
}
],
- model="claude-3-5-sonnet-20240620",
+ model="claude-3-5-sonnet-20241022",
metadata={"user_id": "13803d75-b4b5-4c3e-b2a2-6f21399b021b"},
stop_sequences=["string", "string", "string"],
stream=False,
@@ -411,7 +411,7 @@ async def test_raw_response_create_overload_1(self, async_client: AsyncAnthropic
"role": "user",
}
],
- model="claude-3-5-sonnet-20240620",
+ model="claude-3-5-sonnet-20241022",
)
assert response.is_closed is True
@@ -429,7 +429,7 @@ async def test_streaming_response_create_overload_1(self, async_client: AsyncAnt
"role": "user",
}
],
- model="claude-3-5-sonnet-20240620",
+ model="claude-3-5-sonnet-20241022",
) as response:
assert not response.is_closed
assert response.http_request.headers.get("X-Stainless-Lang") == "python"
@@ -449,7 +449,7 @@ async def test_method_create_overload_2(self, async_client: AsyncAnthropic) -> N
"role": "user",
}
],
- model="claude-3-5-sonnet-20240620",
+ model="claude-3-5-sonnet-20241022",
stream=True,
)
await message_stream.response.aclose()
@@ -464,7 +464,7 @@ async def test_method_create_with_all_params_overload_2(self, async_client: Asyn
"role": "user",
}
],
- model="claude-3-5-sonnet-20240620",
+ model="claude-3-5-sonnet-20241022",
stream=True,
metadata={"user_id": "13803d75-b4b5-4c3e-b2a2-6f21399b021b"},
stop_sequences=["string", "string", "string"],
@@ -547,7 +547,7 @@ async def test_raw_response_create_overload_2(self, async_client: AsyncAnthropic
"role": "user",
}
],
- model="claude-3-5-sonnet-20240620",
+ model="claude-3-5-sonnet-20241022",
stream=True,
)
@@ -565,7 +565,7 @@ async def test_streaming_response_create_overload_2(self, async_client: AsyncAnt
"role": "user",
}
],
- model="claude-3-5-sonnet-20240620",
+ model="claude-3-5-sonnet-20241022",
stream=True,
) as response:
assert not response.is_closed
diff --git a/tests/test_client.py b/tests/test_client.py
index 569974c7..ef682a44 100644
--- a/tests/test_client.py
+++ b/tests/test_client.py
@@ -905,7 +905,7 @@ def retry_handler(_request: httpx.Request) -> httpx.Response:
"role": "user",
}
],
- model="claude-3-5-sonnet-20240620",
+ model="claude-3-5-sonnet-20241022",
)
assert response.retries_taken == failures_before_success
@@ -938,7 +938,7 @@ def retry_handler(_request: httpx.Request) -> httpx.Response:
"role": "user",
}
],
- model="claude-3-5-sonnet-20240620",
+ model="claude-3-5-sonnet-20241022",
extra_headers={"x-stainless-retry-count": Omit()},
)
@@ -971,7 +971,7 @@ def retry_handler(_request: httpx.Request) -> httpx.Response:
"role": "user",
}
],
- model="claude-3-5-sonnet-20240620",
+ model="claude-3-5-sonnet-20241022",
extra_headers={"x-stainless-retry-count": "42"},
)
@@ -1004,7 +1004,7 @@ def retry_handler(_request: httpx.Request) -> httpx.Response:
"role": "user",
}
],
- model="claude-3-5-sonnet-20240620",
+ model="claude-3-5-sonnet-20241022",
) as response:
assert response.retries_taken == failures_before_success
assert int(response.http_request.headers.get("x-stainless-retry-count")) == failures_before_success
@@ -1881,7 +1881,7 @@ def retry_handler(_request: httpx.Request) -> httpx.Response:
"role": "user",
}
],
- model="claude-3-5-sonnet-20240620",
+ model="claude-3-5-sonnet-20241022",
)
assert response.retries_taken == failures_before_success
@@ -1915,7 +1915,7 @@ def retry_handler(_request: httpx.Request) -> httpx.Response:
"role": "user",
}
],
- model="claude-3-5-sonnet-20240620",
+ model="claude-3-5-sonnet-20241022",
extra_headers={"x-stainless-retry-count": Omit()},
)
@@ -1949,7 +1949,7 @@ def retry_handler(_request: httpx.Request) -> httpx.Response:
"role": "user",
}
],
- model="claude-3-5-sonnet-20240620",
+ model="claude-3-5-sonnet-20241022",
extra_headers={"x-stainless-retry-count": "42"},
)
@@ -1983,7 +1983,7 @@ def retry_handler(_request: httpx.Request) -> httpx.Response:
"role": "user",
}
],
- model="claude-3-5-sonnet-20240620",
+ model="claude-3-5-sonnet-20241022",
) as response:
assert response.retries_taken == failures_before_success
assert int(response.http_request.headers.get("x-stainless-retry-count")) == failures_before_success