Skip to content

Commit

Permalink
Add AWS bedrock
Browse files Browse the repository at this point in the history
  • Loading branch information
cpsievert committed Oct 29, 2024
1 parent 956643a commit 23e7b98
Show file tree
Hide file tree
Showing 7 changed files with 204 additions and 11 deletions.
22 changes: 19 additions & 3 deletions README.md
Original file line number Diff line number Diff line change
Expand Up @@ -215,11 +215,27 @@ from chatlas import ChatOllama
chat = ChatOllama(model="llama3.2")
```

<!--
### AWS Bedrock

TODO: Implement Anthropic Bedrock
-->
[AWS Bedrock](https://aws.amazon.com/bedrock/) provides a number of chat-based models, including Anthropic's [Claude](https://aws.amazon.com/bedrock/claude/) models. To use AWS Bedrock, you'll need the `anthropic` Python package:

```shell
pip install 'anthropic[bedrock]'
```

Then, pass along information about your AWS deployment to the `ChatBedrockAnthropic` constructor. Also, see [here](https://boto3.amazonaws.com/v1/documentation/api/latest/guide/credentials.html) for a more detailed explanation of how to properly manage your AWS credentials.

```python
from chatlas import ChatBedrockAnthropic

chat = ChatBedrockAnthropic(
aws_profile='...',
  aws_region='us-east-1',
aws_secret_key='...',
aws_access_key='...',
aws_session_token='...',
)
```


### Azure
Expand Down
129 changes: 127 additions & 2 deletions chatlas/_anthropic.py
Original file line number Diff line number Diff line change
Expand Up @@ -31,6 +31,9 @@
from anthropic.types.tool_use_block_param import ToolUseBlockParam

from .types._anthropic_client import ProviderClientArgs
from .types._anthropic_client_bedrock import (
ProviderClientArgs as BedrockProviderArgs,
)
from .types._anthropic_create import CreateCompletionArgs

ContentBlockParam = Union[
Expand Down Expand Up @@ -98,7 +101,7 @@ def ChatAnthropic(
model = inform_model_default("claude-3-5-sonnet-latest")

return Chat(
provider=ClaudeProvider(
provider=AnthropicProvider(
api_key=api_key,
model=model,
max_tokens=max_tokens,
Expand All @@ -111,7 +114,7 @@ def ChatAnthropic(
)


class ClaudeProvider(Provider[Message, RawMessageStreamEvent, Message]):
class AnthropicProvider(Provider[Message, RawMessageStreamEvent, Message]):
def __init__(
self,
*,
Expand Down Expand Up @@ -356,3 +359,125 @@ def _as_turn(completion: Message) -> Turn:
tokens_log("Anthropic", tokens)

return Turn("assistant", contents, tokens=tokens)


def ChatBedrockAnthropic(
    *,
    model: Optional[str] = None,
    aws_secret_key: Optional[str] = None,
    aws_access_key: Optional[str] = None,
    aws_region: Optional[str] = None,
    aws_profile: Optional[str] = None,
    aws_session_token: Optional[str] = None,
    base_url: Optional[str] = None,
    system_prompt: Optional[str] = None,
    turns: Optional[list[Turn]] = None,
    kwargs: Optional["BedrockProviderArgs"] = None,
) -> Chat["CreateCompletionArgs"]:
    """
    Chat with an AWS Bedrock model.

    [AWS Bedrock](https://aws.amazon.com/bedrock/) provides a number of
    chat-based models, including Anthropic's
    [Claude](https://aws.amazon.com/bedrock/claude/) models.

    Parameters
    ----------
    model
        The model to use for the chat.
    aws_secret_key
        The AWS secret key to use for authentication.
    aws_access_key
        The AWS access key to use for authentication.
    aws_region
        The AWS region to use. Defaults to the AWS_REGION environment variable.
        If that is not set, defaults to `'us-east-1'`.
    aws_profile
        The AWS profile to use.
    aws_session_token
        The AWS session token to use.
    base_url
        The base URL to use. Defaults to the ANTHROPIC_BEDROCK_BASE_URL
        environment variable. If that is not set, defaults to
        `f"https://bedrock-runtime.{aws_region}.amazonaws.com"`.
    system_prompt
        A system prompt to set the behavior of the assistant.
    turns
        A list of turns to start the chat with (i.e., continuing a previous
        conversation). If not provided, the conversation begins from scratch.
        Do not provide non-None values for both `turns` and `system_prompt`.
    kwargs
        Additional arguments to pass to the `anthropic.AnthropicBedrock()`
        client constructor.

    Returns
    -------
    Chat
        A Chat object.

    Note
    ----
    For more information on configuring AWS credentials, see
    <https://boto3.amazonaws.com/v1/documentation/api/latest/guide/credentials.html>
    """

    if model is None:
        # Default model from https://github.com/anthropics/anthropic-sdk-python?tab=readme-ov-file#aws-bedrock
        model = inform_model_default("anthropic.claude-3-sonnet-20240229-v1:0")

    return Chat(
        provider=AnthropicBedrockProvider(
            model=model,
            aws_secret_key=aws_secret_key,
            aws_access_key=aws_access_key,
            aws_region=aws_region,
            aws_profile=aws_profile,
            aws_session_token=aws_session_token,
            base_url=base_url,
            kwargs=kwargs,
        ),
        turns=normalize_turns(
            turns or [],
            system_prompt,
        ),
    )


class AnthropicBedrockProvider(AnthropicProvider):
    """Anthropic provider backed by models hosted on AWS Bedrock.

    Reuses all of :class:`AnthropicProvider`'s request/response handling and
    only swaps in the Bedrock-flavored SDK clients.
    """

    def __init__(
        self,
        *,
        model: str,
        aws_secret_key: str | None,
        aws_access_key: str | None,
        aws_region: str | None,
        aws_profile: str | None,
        aws_session_token: str | None,
        base_url: str | None,
        kwargs: Optional["BedrockProviderArgs"] = None,
    ):
        # The bedrock clients live in an optional extra of `anthropic`, so
        # import lazily and fail with an actionable install hint.
        try:
            from anthropic import AnthropicBedrock, AsyncAnthropicBedrock
        except ImportError:
            raise ImportError(
                "`ChatBedrockAnthropic()` requires the `anthropic` package. "
                "Install it with `pip install anthropic[bedrock]`."
            )

        self._model = model

        # Explicit constructor arguments form the base; any user-supplied
        # `kwargs` take precedence over them.
        client_args: "BedrockProviderArgs" = {
            "aws_secret_key": aws_secret_key,
            "aws_access_key": aws_access_key,
            "aws_region": aws_region,
            "aws_profile": aws_profile,
            "aws_session_token": aws_session_token,
            "base_url": base_url,
        }
        if kwargs:
            client_args.update(kwargs)

        # Keep both a sync and an async client so either API surface works.
        self._client = AnthropicBedrock(**client_args)  # type: ignore
        self._async_client = AsyncAnthropicBedrock(**client_args)  # type: ignore
2 changes: 1 addition & 1 deletion chatlas/_openai.py
Original file line number Diff line number Diff line change
Expand Up @@ -431,7 +431,7 @@ def ChatAzureOpenAI(
system_prompt: Optional[str] = None,
turns: Optional[list[Turn]] = None,
kwargs: Optional["AzureProviderArgs"] = None,
) -> Chat["AzureProviderArgs"]:
) -> Chat["ChatCompletionArgs"]:
"""
Chat with a model hosted on Azure OpenAI.
Expand Down
23 changes: 23 additions & 0 deletions chatlas/types/_anthropic_client_bedrock.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,23 @@
# ---------------------------------------------------------
# Do not modify this file. It was generated by `scripts/generate_typed_dicts.py`.
# ---------------------------------------------------------

from typing import Mapping, Optional, TypedDict

import anthropic
import httpx


class ProviderClientArgs(TypedDict, total=False):
    """Constructor arguments for the `anthropic` AWS Bedrock clients.

    Generated from `AsyncAnthropicBedrock.__init__`; every key is optional
    (`total=False`).
    """

    # AWS credential / deployment settings
    aws_secret_key: str | None
    aws_access_key: str | None
    aws_region: str | None
    aws_profile: str | None
    aws_session_token: str | None
    # HTTP transport configuration
    base_url: str | httpx.URL | None
    timeout: float | anthropic.Timeout | None | anthropic.NotGiven
    max_retries: int
    default_headers: Optional[Mapping[str, str]]
    default_query: Optional[Mapping[str, object]]
    http_client: httpx.AsyncClient
    # SDK-internal flag; presumably toggles strict response validation — TODO confirm
    _strict_response_validation: bool
13 changes: 9 additions & 4 deletions chatlas/types/_anthropic_create.py
Original file line number Diff line number Diff line change
Expand Up @@ -7,9 +7,11 @@

import anthropic
import anthropic._types
import anthropic.types.message_create_params
import anthropic.types.message_param
import anthropic.types.text_block_param
import anthropic.types.tool_choice_any_param
import anthropic.types.tool_choice_auto_param
import anthropic.types.tool_choice_tool_param
import anthropic.types.tool_param


Expand All @@ -19,7 +21,10 @@ class CreateCompletionArgs(TypedDict, total=False):
model: Union[
str,
Literal[
"claude-3-5-sonnet-latest",
"claude-3-5-sonnet-20241022",
"claude-3-5-sonnet-20240620",
"claude-3-opus-latest",
"claude-3-opus-20240229",
"claude-3-sonnet-20240229",
"claude-3-haiku-20240307",
Expand All @@ -37,9 +42,9 @@ class CreateCompletionArgs(TypedDict, total=False):
]
temperature: float | anthropic.NotGiven
tool_choice: Union[
anthropic.types.message_create_params.ToolChoiceToolChoiceAuto,
anthropic.types.message_create_params.ToolChoiceToolChoiceAny,
anthropic.types.message_create_params.ToolChoiceToolChoiceTool,
anthropic.types.tool_choice_auto_param.ToolChoiceAutoParam,
anthropic.types.tool_choice_any_param.ToolChoiceAnyParam,
anthropic.types.tool_choice_tool_param.ToolChoiceToolParam,
anthropic.NotGiven,
]
tools: Union[Iterable[anthropic.types.tool_param.ToolParam], anthropic.NotGiven]
Expand Down
10 changes: 10 additions & 0 deletions chatlas/types/_openai_create.py
Original file line number Diff line number Diff line change
Expand Up @@ -8,6 +8,7 @@
import openai
import openai._types
import openai.types.chat.chat_completion_assistant_message_param
import openai.types.chat.chat_completion_audio_param
import openai.types.chat.chat_completion_function_call_option_param
import openai.types.chat.chat_completion_function_message_param
import openai.types.chat.chat_completion_named_tool_choice_param
Expand Down Expand Up @@ -42,7 +43,10 @@ class ChatCompletionArgs(TypedDict, total=False):
"gpt-4o",
"gpt-4o-2024-08-06",
"gpt-4o-2024-05-13",
"gpt-4o-realtime-preview",
"gpt-4o-realtime-preview-2024-10-01",
"gpt-4o-audio-preview",
"gpt-4o-audio-preview-2024-10-01",
"chatgpt-4o-latest",
"gpt-4o-mini",
"gpt-4o-mini-2024-07-18",
Expand All @@ -67,6 +71,11 @@ class ChatCompletionArgs(TypedDict, total=False):
"gpt-3.5-turbo-16k-0613",
],
]
audio: Union[
openai.types.chat.chat_completion_audio_param.ChatCompletionAudioParam,
None,
openai.NotGiven,
]
frequency_penalty: Union[float, None, openai.NotGiven]
function_call: Union[
Literal["none", "auto"],
Expand All @@ -81,6 +90,7 @@ class ChatCompletionArgs(TypedDict, total=False):
max_completion_tokens: Union[int, None, openai.NotGiven]
max_tokens: Union[int, None, openai.NotGiven]
metadata: Union[dict[str, str], None, openai.NotGiven]
modalities: Union[list[Literal["text", "audio"]], None, openai.NotGiven]
n: Union[int, None, openai.NotGiven]
parallel_tool_calls: bool | openai.NotGiven
presence_penalty: Union[float, None, openai.NotGiven]
Expand Down
16 changes: 15 additions & 1 deletion scripts/_generate_anthropic_types.py
Original file line number Diff line number Diff line change
@@ -1,7 +1,7 @@
from pathlib import Path

import httpx
from anthropic import AsyncAnthropic
from anthropic import AsyncAnthropic, AsyncAnthropicBedrock
from anthropic.resources import AsyncMessages

from _utils import generate_typeddict_code, write_code_to_file
Expand Down Expand Up @@ -34,3 +34,17 @@
init_args,
src_dir / "types" / "_anthropic_client.py",
)


# Generate a TypedDict describing the Bedrock client's constructor arguments
# (mirrors the generation step above for the plain Anthropic client).
init_args = generate_typeddict_code(
    AsyncAnthropicBedrock.__init__,
    "ProviderClientArgs",
    excluded_fields={"self"},
    # presumably used to resolve the `URL` forward reference in the
    # signature's annotations — TODO confirm against generate_typeddict_code
    localns={"URL": httpx.URL},
)

# Write the generated TypedDict into chatlas/types/; `setup_code` injects the
# `anthropic` import that the generated annotations reference.
write_code_to_file(
    init_args,
    src_dir / "types" / "_anthropic_client_bedrock.py",
    setup_code="import anthropic",
)

0 comments on commit 23e7b98

Please sign in to comment.