mirror of https://github.com/microsoft/autogen.git
Ensure message sent to LLMCallEvent for Anthropic is serializable (#6135)
Messages sent as part of `LLMCallEvent` for Anthropic were not fully serializable. The example below shows TextBlock and ToolUseBlocks inside the content of messages - these throw downstream errors in apps like AGS (or event sinks) that expect serializable dicts inside the LLMCallEvent. ``` [ {'role': 'user', 'content': 'What is the weather in New York?'}, {'role': 'assistant', 'content': [TextBlock(citations=None, text='I can help you find the weather in New York. Let me check that for you.', type='text'), ToolUseBlock(id='toolu_016W8g55GejYGBzRRrcsnt7M', input={'city': 'New York'}, name='get_weather', type='tool_use')]}, {'role': 'user', 'content': [{'type': 'tool_result', 'tool_use_id': 'toolu_016W8g55GejYGBzRRrcsnt7M', 'content': 'The weather in New York is 73 degrees and Sunny.'}]} ] ``` This PR attempts to first serialize content of Anthropic messages before they are passed to `LLMCallEvent` ``` [ {'role': 'user', 'content': 'What is the weather in New York?'}, {'role': 'assistant', 'content': [{'citations': None, 'text': 'I can help you find the weather in New York. Let me check that for you.', 'type': 'text'}, {'id': 'toolu_016W8g55GejYGBzRRrcsnt7M', 'input': {'city': 'New York'}, 'name': 'get_weather', 'type': 'tool_use'}]}, {'role': 'user', 'content': [{'type': 'tool_result', 'tool_use_id': 'toolu_016W8g55GejYGBzRRrcsnt7M', 'content': 'The weather in New York is 73 degrees and Sunny.'}]} ] ```
This commit is contained in:
parent
0d9b574d09
commit
bd572cc112
|
@ -432,6 +432,25 @@ class BaseAnthropicChatCompletionClient(ChatCompletionClient):
|
|||
self._total_usage = RequestUsage(prompt_tokens=0, completion_tokens=0)
|
||||
self._actual_usage = RequestUsage(prompt_tokens=0, completion_tokens=0)
|
||||
|
||||
def _serialize_message(self, message: MessageParam) -> Dict[str, Any]:
|
||||
"""Convert an Anthropic MessageParam to a JSON-serializable format."""
|
||||
if isinstance(message, dict):
|
||||
result: Dict[str, Any] = {}
|
||||
for key, value in message.items():
|
||||
if key == "content" and isinstance(value, list):
|
||||
serialized_blocks: List[Any] = []
|
||||
for block in value: # type: ignore
|
||||
if isinstance(block, BaseModel):
|
||||
serialized_blocks.append(block.model_dump())
|
||||
else:
|
||||
serialized_blocks.append(block)
|
||||
result[key] = serialized_blocks
|
||||
else:
|
||||
result[key] = value
|
||||
return result
|
||||
else:
|
||||
return {"role": "unknown", "content": str(message)}
|
||||
|
||||
def _merge_system_messages(self, messages: Sequence[LLMMessage]) -> Sequence[LLMMessage]:
|
||||
"""
|
||||
Merge continuous system messages into a single message.
|
||||
|
@ -573,10 +592,11 @@ class BaseAnthropicChatCompletionClient(ChatCompletionClient):
|
|||
prompt_tokens=result.usage.input_tokens,
|
||||
completion_tokens=result.usage.output_tokens,
|
||||
)
|
||||
serializable_messages: List[Dict[str, Any]] = [self._serialize_message(msg) for msg in anthropic_messages]
|
||||
|
||||
logger.info(
|
||||
LLMCallEvent(
|
||||
messages=cast(List[Dict[str, Any]], anthropic_messages),
|
||||
messages=serializable_messages,
|
||||
response=result.model_dump(),
|
||||
prompt_tokens=usage.prompt_tokens,
|
||||
completion_tokens=usage.completion_tokens,
|
||||
|
|
|
@ -317,13 +317,9 @@ async def test_anthropic_multimodal() -> None:
|
|||
async def test_anthropic_serialization() -> None:
|
||||
"""Test serialization and deserialization of component."""
|
||||
|
||||
api_key = os.getenv("ANTHROPIC_API_KEY")
|
||||
if not api_key:
|
||||
pytest.skip("ANTHROPIC_API_KEY not found in environment variables")
|
||||
|
||||
client = AnthropicChatCompletionClient(
|
||||
model="claude-3-haiku-20240307",
|
||||
api_key=api_key,
|
||||
api_key="api-key",
|
||||
)
|
||||
|
||||
# Serialize and deserialize
|
||||
|
@ -336,6 +332,42 @@ async def test_anthropic_serialization() -> None:
|
|||
assert isinstance(loaded_model_client, AnthropicChatCompletionClient)
|
||||
|
||||
|
||||
@pytest.mark.asyncio
async def test_anthropic_message_serialization_with_tools(caplog: pytest.LogCaptureFixture) -> None:
    """Verify that tool-call messages are emitted in logs in serialized form."""
    api_key = os.getenv("ANTHROPIC_API_KEY")
    if not api_key:
        pytest.skip("ANTHROPIC_API_KEY not found in environment variables")

    # Reuse the module's existing helper functions as tools.
    process_tool = FunctionTool(_pass_function, description="Process input text", name="process_text")
    sum_tool = FunctionTool(_add_numbers, description="Add two numbers together", name="add_numbers")

    client = AnthropicChatCompletionClient(
        model="claude-3-haiku-20240307",
        api_key=api_key,
    )

    # Capture INFO-level records from every logger.
    with caplog.at_level(logging.INFO):
        # Prompt the model in a way that should produce a tool call.
        await client.create(
            messages=[
                SystemMessage(content="Use the tools available to help the user."),
                UserMessage(content="Process the text 'hello world' using the process_text tool.", source="user"),
            ],
            tools=[process_tool, sum_tool],
        )

    # Any record whose message mentions serialized messages counts,
    # not only records that carry an explicit 'LLMCallEvent' marker.
    message_records = [
        record for record in caplog.records if '"messages":' in str(record.msg) or "messages" in str(record.msg)
    ]

    # At least one log entry must contain the serialized messages.
    assert len(message_records) > 0, "No logs with serialized messages found"
|
||||
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_anthropic_muliple_system_message() -> None:
|
||||
"""Test multiple system messages in a single request."""
|
||||
|
@ -347,7 +379,6 @@ async def test_anthropic_muliple_system_message() -> None:
|
|||
model="claude-3-haiku-20240307",
|
||||
api_key=api_key,
|
||||
)
|
||||
|
||||
# Test multiple system messages
|
||||
messages: List[LLMMessage] = [
|
||||
SystemMessage(content="When you say anything Start with 'FOO'"),
|
||||
|
|
Loading…
Reference in New Issue