Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Anthropic Client #2931

Merged
merged 32 commits into from
Jun 19, 2024
Merged
Show file tree
Hide file tree
Changes from 30 commits
Commits
Show all changes
32 commits
Select commit Hold shift + click to select a range
40c7221
initial setup for the anthropic client with cost config
Hk669 Jun 12, 2024
75b98e8
update: other methods added
Hk669 Jun 12, 2024
fcbbfa1
fix: formatting
Hk669 Jun 12, 2024
ab75440
fix: config unused
Hk669 Jun 12, 2024
08db262
update: changes made in the client
Hk669 Jun 13, 2024
0381cfe
update: test added to the workflow
Hk669 Jun 13, 2024
1da92cc
update: added tests to the anthropic client
Hk669 Jun 13, 2024
7857cf4
fix: errors in workflows and client
Hk669 Jun 13, 2024
e2d93a7
fix
Hk669 Jun 13, 2024
72b1c77
fix: anthropic tools type
Hk669 Jun 13, 2024
c86baae
update: notebook anthropic
Hk669 Jun 13, 2024
8c192de
Nonetype fixed
Hk669 Jun 13, 2024
6d18f7c
fix-tests config
Hk669 Jun 13, 2024
285a5b1
update: tests and client issues
Hk669 Jun 13, 2024
578f353
logger support
Hk669 Jun 13, 2024
8de935f
remove sys path
Hk669 Jun 13, 2024
5bcf32d
updated the functioning of the client
Hk669 Jun 13, 2024
654165d
update: type hints and stream
Hk669 Jun 14, 2024
3fc8431
skip tests- importerror
Hk669 Jun 14, 2024
a34c002
fix: anthropic client and tests
Hk669 Jun 14, 2024
d20f40c
none fix
Hk669 Jun 14, 2024
fd1ee06
Alternating roles, parameter keywords, cost on response,
marklysze Jun 15, 2024
bd01db1
update: anthropic notebook
Hk669 Jun 15, 2024
9df85ec
update: notebook with more details
Hk669 Jun 15, 2024
57baf05
devcontainer
Hk669 Jun 17, 2024
17f7385
Merge branch 'main' into anthropic_client
Hk669 Jun 18, 2024
08e0862
Merge branch 'main' of https://github.com/microsoft/autogen into anth…
Hk669 Jun 18, 2024
23c51f0
Merge branch 'anthropic_client' of https://github.com/microsoft/autog…
Hk669 Jun 18, 2024
d2136cc
update: added validate_params from the client_utils
Hk669 Jun 18, 2024
586dbb6
fix: formatting
Hk669 Jun 18, 2024
dc1b1c3
fix: minor comment
Hk669 Jun 19, 2024
287a9a2
Merge branch 'main' into anthropic_client
Hk669 Jun 19, 2024
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
43 changes: 43 additions & 0 deletions .github/workflows/contrib-tests.yml
Original file line number Diff line number Diff line change
Expand Up @@ -475,3 +475,46 @@ jobs:
with:
file: ./coverage.xml
flags: unittests


AnthropicTest:
  runs-on: ${{ matrix.os }}
  strategy:
    fail-fast: false
    matrix:
      os: ["ubuntu-latest", "windows-latest", "macos-latest"]
      python-version: ["3.9", "3.10", "3.11", "3.12"]

  steps:
    - uses: actions/checkout@v4
      with:
        lfs: true
    - name: Set up Python ${{ matrix.python-version }}
      uses: actions/setup-python@v5
      with:
        python-version: ${{ matrix.python-version }}
    - name: Install packages and dependencies for all tests
      run: |
        python -m pip install --upgrade pip wheel
        # Quote the specifier: unquoted, the shell parses ">=5" as output
        # redirection to a file named "=5" instead of a version constraint.
        pip install "pytest-cov>=5"

    - name: Install packages and dependencies for Anthropic
      run: |
        pip install -e .[test]
        pip install -e .[anthropic]

    - name: Set AUTOGEN_USE_DOCKER based on OS
      shell: bash
      run: |
        if [[ ${{ matrix.os }} != ubuntu-latest ]]; then
          echo "AUTOGEN_USE_DOCKER=False" >> $GITHUB_ENV
        fi

    - name: Coverage
      run: |
        # Produce coverage.xml explicitly; without --cov/--cov-report the
        # Codecov upload step below has no ./coverage.xml to pick up.
        pytest test/oai/test_anthropic.py --skip-openai --cov=autogen --cov-report=xml
    - name: Upload coverage to Codecov
      uses: codecov/codecov-action@v3
      with:
        file: ./coverage.xml
        flags: unittests
6 changes: 5 additions & 1 deletion autogen/logger/file_logger.py
Original file line number Diff line number Diff line change
Expand Up @@ -17,6 +17,7 @@

if TYPE_CHECKING:
from autogen import Agent, ConversableAgent, OpenAIWrapper
from autogen.oai.anthropic import AnthropicClient
from autogen.oai.gemini import GeminiClient

logger = logging.getLogger(__name__)
Expand Down Expand Up @@ -200,7 +201,10 @@ def log_new_wrapper(
self.logger.error(f"[file_logger] Failed to log event {e}")

def log_new_client(
self, client: AzureOpenAI | OpenAI | GeminiClient, wrapper: OpenAIWrapper, init_args: Dict[str, Any]
self,
client: AzureOpenAI | OpenAI | GeminiClient | AnthropicClient,
wrapper: OpenAIWrapper,
init_args: Dict[str, Any],
) -> None:
"""
Log a new client instance.
Expand Down
6 changes: 5 additions & 1 deletion autogen/logger/sqlite_logger.py
Original file line number Diff line number Diff line change
Expand Up @@ -18,6 +18,7 @@

if TYPE_CHECKING:
from autogen import Agent, ConversableAgent, OpenAIWrapper
from autogen.oai.anthropic import AnthropicClient
from autogen.oai.gemini import GeminiClient

logger = logging.getLogger(__name__)
Expand Down Expand Up @@ -387,7 +388,10 @@ def log_function_use(self, source: Union[str, Agent], function: F, args: Dict[st
self._run_query(query=query, args=query_args)

def log_new_client(
self, client: Union[AzureOpenAI, OpenAI, GeminiClient], wrapper: OpenAIWrapper, init_args: Dict[str, Any]
self,
client: Union[AzureOpenAI, OpenAI, GeminiClient, AnthropicClient],
wrapper: OpenAIWrapper,
init_args: Dict[str, Any],
) -> None:
if self.con is None:
return
Expand Down
274 changes: 274 additions & 0 deletions autogen/oai/anthropic.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,274 @@
"""
Create an OpenAI API client for the Anthropic API.
Hk669 marked this conversation as resolved.
Show resolved Hide resolved

Example usage:
Install the `anthropic` package by running `pip install --upgrade anthropic`.
- https://docs.anthropic.com/en/docs/quickstart-guide

import autogen

config_list = [
{
"model": "claude-3-sonnet-20240229",
"api_key": os.getenv("ANTHROPIC_API_KEY"),
"api_type": "anthropic",
}
]

assistant = autogen.AssistantAgent("assistant", llm_config={"config_list": config_list})
"""

from __future__ import annotations

import copy
import inspect
import json
import os
import warnings
from typing import Any, Dict, List, Tuple, Union

from anthropic import Anthropic
from anthropic import __version__ as anthropic_version
from anthropic.types import Completion, Message
from openai.types.chat import ChatCompletion, ChatCompletionMessageToolCall
from openai.types.chat.chat_completion import ChatCompletionMessage, Choice
from typing_extensions import Annotated

# Fixed: `from client_utils import ...` referenced a nonexistent top-level
# module; the helper lives in the package alongside this file.
from autogen.oai.client_utils import validate_parameter


def _version_tuple(version: str) -> Tuple[int, ...]:
    """Parse up to three leading numeric components of a version string.

    Non-numeric suffixes (e.g. "1rc1") contribute only their digits; a
    component with no digits parses as 0.
    """
    components = []
    for piece in version.split(".")[:3]:
        digits = "".join(ch for ch in piece if ch.isdigit())
        components.append(int(digits) if digits else 0)
    return tuple(components)


# Tool use requires anthropic >= 0.23.1. Compare parsed numeric tuples, not
# raw strings: lexicographic comparison mis-orders versions such as
# "0.9.0" vs "0.23.1".
TOOL_ENABLED = _version_tuple(anthropic_version) >= (0, 23, 1)
if TOOL_ENABLED:
    from anthropic.types.tool_use_block_param import (
        ToolUseBlockParam,
    )


# Pricing in USD per 1,000 tokens, keyed by model name:
# (input cost per 1k input tokens, output cost per 1k output tokens).
# NOTE(review): "claude-3.0-opus" / "claude-3.0-haiku" do not match Anthropic's
# dated model-ID scheme (e.g. "claude-3-opus-20240229") — confirm these keys
# against the model names actually used in config lists.
ANTHROPIC_PRICING_1k = {
    "claude-3-sonnet-20240229": (0.003, 0.015),
    "claude-3-opus-20240229": (0.015, 0.075),
    "claude-2.0": (0.008, 0.024),
    "claude-2.1": (0.008, 0.024),
    "claude-3.0-opus": (0.015, 0.075),
    "claude-3.0-haiku": (0.00025, 0.00125),
}


class AnthropicClient:
    """Anthropic Messages API client exposing AutoGen's OpenAI-style client protocol.

    Responsibilities: config validation, message normalization (Anthropic
    requires strictly alternating user/assistant turns), OpenAI tool/function
    conversion, and per-response cost accounting.
    """

    def __init__(self, **kwargs: Any):
        """Initialize the Anthropic API client.

        Args:
            api_key (str): The API key for the Anthropic API, or set the
                `ANTHROPIC_API_KEY` environment variable.

        Raises:
            ValueError: If no API key is given and the environment variable is unset.
        """
        self._api_key = kwargs.get("api_key", None)

        if not self._api_key:
            self._api_key = os.getenv("ANTHROPIC_API_KEY")

        if self._api_key is None:
            raise ValueError("API key is required to use the Anthropic API.")

        self._client = Anthropic(api_key=self._api_key)
        # Cache of the most recent tool-use turn ("think" text plus the
        # "tool_use" block) so the assistant turn can be reconstructed when
        # the corresponding function result comes back.
        self._last_tooluse_status: Dict[str, Any] = {}

    def load_config(self, params: Dict[str, Any]):
        """Validate and extract the Anthropic-specific parameters from `params`.

        Returns a dict with model, temperature, max_tokens, top_k, top_p,
        stop_sequences and stream (stream is forced to False with a warning,
        as streaming is not yet supported).
        """
        anthropic_params = {}

        anthropic_params["model"] = params.get("model", None)
        assert anthropic_params["model"], "Please provide a `model` in the config_list to use the Anthropic API."

        anthropic_params["temperature"] = validate_parameter(
            params, "temperature", (float, int), False, 1.0, (0.0, 1.0), None
        )
        anthropic_params["max_tokens"] = validate_parameter(params, "max_tokens", int, False, 4096, (1, None), None)
        anthropic_params["top_k"] = validate_parameter(params, "top_k", int, True, None, (1, None), None)
        anthropic_params["top_p"] = validate_parameter(params, "top_p", (float, int), True, None, (0.0, 1.0), None)
        anthropic_params["stop_sequences"] = validate_parameter(params, "stop_sequences", list, True, None, None, None)
        anthropic_params["stream"] = validate_parameter(params, "stream", bool, False, False, None, None)

        if anthropic_params["stream"]:
            warnings.warn(
                "Streaming is not currently supported, streaming will be disabled.",
                UserWarning,
            )
            anthropic_params["stream"] = False

        return anthropic_params

    def cost(self, response) -> float:
        """Return the cost previously computed and attached to the response by `create`."""
        return response.cost

    @property
    def api_key(self):
        # Read-only access to the configured API key.
        return self._api_key

    def create(self, params: Dict[str, Any]) -> Completion:
        """Create a completion for a given config.

        Args:
            params: The params for the completion (messages, model, tools, ...).

        Returns:
            The Anthropic Message response, with `.cost` attached.
        """
        if "tools" in params:
            converted_functions = self.convert_tools_to_functions(params["tools"])
            params["functions"] = params.get("functions", []) + converted_functions

        raw_contents = params["messages"]
        anthropic_params = self.load_config(params)

        processed_messages = []
        for message in raw_contents:

            if message["role"] == "system":
                # Anthropic takes the system prompt as a top-level field, not
                # a message. NOTE(review): if several system messages appear,
                # only the last one survives — confirm that is intended.
                params["system"] = message["content"]
            elif message["role"] == "function":
                processed_messages.append(self.return_function_call_result(message["content"]))
            elif "function_call" in message:
                processed_messages.append(self.restore_last_tooluse_status())
            elif message["content"] == "":
                message["content"] = "I'm done. Please send TERMINATE"  # Not sure about this one.
                processed_messages.append(message)
            else:
                processed_messages.append(message)

        # Check for interleaving roles and correct; for Anthropic the turns
        # must strictly alternate: user, assistant, user, ...
        # Fixed: the original compared roles with `is not`, an identity check
        # that only happens to work because CPython interns short strings.
        for i, message in enumerate(processed_messages):
            expected_role = "user" if i % 2 == 0 else "assistant"
            if message["role"] != expected_role:
                message["role"] = expected_role

        # Note: When using reflection_with_llm we may end up with an "assistant" message as the last message
        if processed_messages and processed_messages[-1]["role"] != "user":
            # If the last role is not user, add a continue message at the end
            continue_message = {"content": "continue", "role": "user"}
            processed_messages.append(continue_message)

        params["messages"] = processed_messages

        # TODO: support stream
        params = params.copy()
        if "functions" in params:
            tools_configs = params.pop("functions")
            tools_configs = [self.openai_func_to_anthropic(tool) for tool in tools_configs]
            params["tools"] = tools_configs

        # Anthropic doesn't accept None values, so we need to use keyword argument unpacking instead of setting parameters.
        # Copy params we need into anthropic_params and remove any that don't have values.
        anthropic_params["messages"] = params["messages"]
        if "system" in params:
            anthropic_params["system"] = params["system"]
        if "tools" in params:
            anthropic_params["tools"] = params["tools"]
        if anthropic_params["top_k"] is None:
            del anthropic_params["top_k"]
        if anthropic_params["top_p"] is None:
            del anthropic_params["top_p"]
        if anthropic_params["stop_sequences"] is None:
            del anthropic_params["stop_sequences"]

        response = self._client.messages.create(**anthropic_params)

        # Calculate and save the cost onto the response so `cost()` can read it.
        prompt_tokens = response.usage.input_tokens
        completion_tokens = response.usage.output_tokens
        response.cost = _calculate_cost(prompt_tokens, completion_tokens, anthropic_params["model"])

        return response

    def message_retrieval(self, response: Union[Message]) -> Union[List[str], List[ChatCompletionMessage]]:
        """Retrieve the messages from the response.

        Tool-use blocks are converted to OpenAI-style messages and placed
        first; plain text blocks are returned as strings.
        """
        messages = response.content
        if len(messages) == 0:
            return [None]
        res = []
        if TOOL_ENABLED:
            for choice in messages:
                if choice.type == "tool_use":
                    res.insert(0, self.response_to_openai_message(choice))
                    self._last_tooluse_status["tool_use"] = choice.model_dump()
                else:
                    res.append(choice.text)
                    self._last_tooluse_status["think"] = choice.text

            return res

        else:
            # NOTE(review): Anthropic content blocks do not carry a `.message`
            # attribute, so this legacy (< 0.23.1) branch looks broken —
            # verify against the old SDK's response shape.
            return [  # type: ignore [return-value]
                choice.text if choice.message.function_call is not None else choice.message.content  # type: ignore [union-attr]
                for choice in messages
            ]

    def response_to_openai_message(self, response) -> ChatCompletionMessage:
        """Convert an Anthropic tool-use block to an OpenAI ChatCompletionMessage."""
        dict_response = response.model_dump()
        return ChatCompletionMessage(
            content=None,
            role="assistant",
            function_call={"name": dict_response["name"], "arguments": json.dumps(dict_response["input"])},
        )

    def restore_last_tooluse_status(self) -> Dict:
        """Rebuild the assistant turn (optional "think" text + tool_use block) from the cache."""
        cached_content = []
        if "think" in self._last_tooluse_status:
            cached_content.append({"type": "text", "text": self._last_tooluse_status["think"]})
        cached_content.append(self._last_tooluse_status["tool_use"])
        res = {"role": "assistant", "content": cached_content}
        return res

    def return_function_call_result(self, result: str) -> Dict:
        """Wrap a function result string as an Anthropic `tool_result` user message."""
        return {
            "role": "user",
            "content": [
                {
                    "type": "tool_result",
                    "tool_use_id": self._last_tooluse_status["tool_use"]["id"],
                    "content": result,
                }
            ],
        }

    @staticmethod
    def openai_func_to_anthropic(openai_func: dict) -> dict:
        """Rename OpenAI's `parameters` key to Anthropic's `input_schema` (shallow copy)."""
        res = openai_func.copy()
        res["input_schema"] = res.pop("parameters")
        return res

    @staticmethod
    def get_usage(response: Message) -> Dict:
        """Get the usage of tokens and their cost information."""
        return {
            "prompt_tokens": response.usage.input_tokens if response.usage is not None else 0,
            "completion_tokens": response.usage.output_tokens if response.usage is not None else 0,
            "total_tokens": (
                response.usage.input_tokens + response.usage.output_tokens if response.usage is not None else 0
            ),
            "cost": response.cost if hasattr(response, "cost") else 0.0,
            "model": response.model,
        }

    @staticmethod
    def convert_tools_to_functions(tools: List) -> List:
        """Extract the `function` payloads from OpenAI-style tool definitions."""
        functions = []
        for tool in tools:
            if tool.get("type") == "function" and "function" in tool:
                functions.append(tool["function"])

        return functions


def _calculate_cost(input_tokens: int, output_tokens: int, model: str) -> float:
    """Return the USD cost of a completion from Anthropic's per-1k-token pricing.

    Emits a UserWarning and returns 0.0 when the model is not in the
    pricing table.
    """
    pricing = ANTHROPIC_PRICING_1k.get(model)
    if pricing is None:
        warnings.warn(f"Cost calculation not available for model {model}", UserWarning)
        return 0.0

    input_rate, output_rate = pricing
    return (input_tokens / 1000) * input_rate + (output_tokens / 1000) * output_rate
12 changes: 12 additions & 0 deletions autogen/oai/client.py
Original file line number Diff line number Diff line change
Expand Up @@ -49,6 +49,13 @@
except ImportError as e:
gemini_import_exception = e

# Optional dependency: the Anthropic client is only usable when the
# `anthropic` package is installed. On failure, remember the ImportError so a
# helpful error can be raised later if a config requests api_type "anthropic".
try:
    from autogen.oai.anthropic import AnthropicClient

    anthropic_import_exception: Optional[ImportError] = None
except ImportError as e:
    anthropic_import_exception = e

logger = logging.getLogger(__name__)
if not logger.handlers:
# Add the console handler.
Expand Down Expand Up @@ -449,6 +456,11 @@ def _register_default_client(self, config: Dict[str, Any], openai_config: Dict[s
raise ImportError("Please install `google-generativeai` to use Google OpenAI API.")
client = GeminiClient(**openai_config)
self._clients.append(client)
elif api_type is not None and api_type.startswith("anthropic"):
if anthropic_import_exception:
raise ImportError("Please install `anthropic` to use Anthropic API.")
client = AnthropicClient(**openai_config)
self._clients.append(client)
else:
client = OpenAI(**openai_config)
self._clients.append(OpenAIClient(client))
Expand Down
Loading
Loading