mirror of
https://github.com/home-assistant/core.git
Fix JSON serialization of time objects in Open Router tool results (#162505)
@@ -34,6 +34,7 @@ from homeassistant.core import HomeAssistant
 from homeassistant.exceptions import HomeAssistantError
 from homeassistant.helpers import device_registry as dr, llm
 from homeassistant.helpers.entity import Entity
+from homeassistant.helpers.json import json_dumps
 
 from . import OpenRouterConfigEntry
 from .const import DOMAIN, LOGGER
@@ -109,7 +110,7 @@ def _convert_content_to_chat_message(
         return ChatCompletionToolMessageParam(
             role="tool",
             tool_call_id=content.tool_call_id,
-            content=json.dumps(content.tool_result),
+            content=json_dumps(content.tool_result),
         )
 
     role: Literal["user", "assistant", "system"] = content.role
@@ -130,7 +131,7 @@ def _convert_content_to_chat_message(
                 type="function",
                 id=tool_call.id,
                 function=Function(
-                    arguments=json.dumps(tool_call.tool_args),
+                    arguments=json_dumps(tool_call.tool_args),
                     name=tool_call.tool_name,
                 ),
             )
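Why the swap from json.dumps to json_dumps matters: HassGetCurrentTime puts a datetime.time into the intent response's speech_slots, and the standard library encoder cannot serialize that when the tool message is built. A minimal standalone sketch of the failure and of the serialized form the snapshots below expect; the default= hook is only a stdlib approximation of homeassistant.helpers.json.json_dumps, not Home Assistant's implementation:

import json
from datetime import time

# Shape of the tool result recorded in the snapshot below for HassGetCurrentTime.
tool_result = {"speech_slots": {"time": time(12, 0)}}

# The old json.dumps call raises here: datetime.time is not JSON serializable
# with the standard library encoder.
try:
    json.dumps(tool_result)
except TypeError as err:
    print(f"json.dumps failed: {err}")

# homeassistant.helpers.json.json_dumps handles such objects; the snapshot
# shows the value rendered as "12:00:00". A rough stdlib stand-in (an
# approximation, not HA's encoder) uses a default hook:
print(json.dumps(tool_result, default=lambda o: o.isoformat()))
# {"speech_slots": {"time": "12:00:00"}}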
@@ -126,6 +126,65 @@
 # ---
 # name: test_function_call[True]
   list([
+    dict({
+      'attachments': None,
+      'content': 'What time is it?',
+      'created': HAFakeDatetime(2024, 5, 24, 12, 0, tzinfo=datetime.timezone.utc),
+      'role': 'user',
+    }),
+    dict({
+      'agent_id': 'conversation.gpt_3_5_turbo',
+      'content': None,
+      'created': HAFakeDatetime(2024, 5, 24, 12, 0, tzinfo=datetime.timezone.utc),
+      'native': None,
+      'role': 'assistant',
+      'thinking_content': None,
+      'tool_calls': list([
+        dict({
+          'external': True,
+          'id': 'mock_tool_call_id',
+          'tool_args': dict({
+          }),
+          'tool_name': 'HassGetCurrentTime',
+        }),
+      ]),
+    }),
+    dict({
+      'agent_id': 'conversation.gpt_3_5_turbo',
+      'created': HAFakeDatetime(2024, 5, 24, 12, 0, tzinfo=datetime.timezone.utc),
+      'role': 'tool_result',
+      'tool_call_id': 'mock_tool_call_id',
+      'tool_name': 'HassGetCurrentTime',
+      'tool_result': dict({
+        'data': dict({
+          'failed': list([
+          ]),
+          'success': list([
+          ]),
+          'targets': list([
+          ]),
+        }),
+        'response_type': 'action_done',
+        'speech': dict({
+          'plain': dict({
+            'extra_data': None,
+            'speech': '12:00 PM',
+          }),
+        }),
+        'speech_slots': dict({
+          'time': datetime.time(12, 0),
+        }),
+      }),
+    }),
+    dict({
+      'agent_id': 'conversation.gpt_3_5_turbo',
+      'content': '12:00 PM',
+      'created': HAFakeDatetime(2024, 5, 24, 12, 0, tzinfo=datetime.timezone.utc),
+      'native': None,
+      'role': 'assistant',
+      'thinking_content': None,
+      'tool_calls': None,
+    }),
     dict({
       'attachments': None,
       'content': 'Please call the test function',
@@ -169,3 +228,68 @@
     }),
   ])
 # ---
+# name: test_function_call[True].1
+  list([
+    dict({
+      'content': '''
+        You are a helpful assistant.
+        Only if the user wants to control a device, tell them to expose entities to their voice assistant in Home Assistant.
+      ''',
+      'role': 'system',
+    }),
+    dict({
+      'content': 'What time is it?',
+      'role': 'user',
+    }),
+    dict({
+      'content': None,
+      'role': 'assistant',
+      'tool_calls': list([
+        dict({
+          'function': dict({
+            'arguments': '{}',
+            'name': 'HassGetCurrentTime',
+          }),
+          'id': 'mock_tool_call_id',
+          'type': 'function',
+        }),
+      ]),
+    }),
+    dict({
+      'content': '{"speech":{"plain":{"speech":"12:00 PM","extra_data":null}},"response_type":"action_done","speech_slots":{"time":"12:00:00"},"data":{"targets":[],"success":[],"failed":[]}}',
+      'role': 'tool',
+      'tool_call_id': 'mock_tool_call_id',
+    }),
+    dict({
+      'content': '12:00 PM',
+      'role': 'assistant',
+    }),
+    dict({
+      'content': 'Please call the test function',
+      'role': 'user',
+    }),
+    dict({
+      'content': None,
+      'role': 'assistant',
+      'tool_calls': list([
+        dict({
+          'function': dict({
+            'arguments': '{"param1":"call1"}',
+            'name': 'test_tool',
+          }),
+          'id': 'call_call_1',
+          'type': 'function',
+        }),
+      ]),
+    }),
+    dict({
+      'content': '"value1"',
+      'role': 'tool',
+      'tool_call_id': 'call_call_1',
+    }),
+    dict({
+      'content': 'I have successfully called the function',
+      'role': 'assistant',
+    }),
+  ])
+# ---
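The blocks above are syrupy snapshots in the .ambr format Home Assistant's test suite uses: the first records the chat log content, the second the exact messages payload sent to the mocked client, with the tool result's time serialized as "12:00:00". For readers unfamiliar with the pattern, a minimal sketch of a snapshot assertion, assuming plain pytest plus syrupy rather than Home Assistant's test fixtures; the test name and data are illustrative only:

# Running `pytest --snapshot-update` writes an .ambr file like the ones above;
# subsequent runs compare against it and fail on any drift.
from syrupy.assertion import SnapshotAssertion


def test_tool_message_shape(snapshot: SnapshotAssertion) -> None:
    messages = [
        {
            "role": "tool",
            "tool_call_id": "mock_tool_call_id",
            "content": '{"time": "12:00:00"}',
        },
    ]
    assert messages == snapshot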
@@ -1,5 +1,6 @@
 """Tests for the OpenRouter integration."""
 
+import datetime
 from unittest.mock import AsyncMock, patch
 
 from freezegun import freeze_time
@@ -18,6 +19,7 @@ from homeassistant.components import conversation
 from homeassistant.const import Platform
 from homeassistant.core import Context, HomeAssistant
 from homeassistant.helpers import entity_registry as er, intent
+from homeassistant.helpers.llm import ToolInput
 
 from . import setup_integration
 
@@ -88,6 +90,43 @@ async def test_function_call(
     """Test function call from the assistant."""
     await setup_integration(hass, mock_config_entry)
 
+    # Add some pre-existing content from conversation.default_agent
+    mock_chat_log.async_add_user_content(
+        conversation.UserContent(content="What time is it?")
+    )
+    mock_chat_log.async_add_assistant_content_without_tools(
+        conversation.AssistantContent(
+            agent_id="conversation.gpt_3_5_turbo",
+            tool_calls=[
+                ToolInput(
+                    tool_name="HassGetCurrentTime",
+                    tool_args={},
+                    id="mock_tool_call_id",
+                    external=True,
+                )
+            ],
+        )
+    )
+    mock_chat_log.async_add_assistant_content_without_tools(
+        conversation.ToolResultContent(
+            agent_id="conversation.gpt_3_5_turbo",
+            tool_call_id="mock_tool_call_id",
+            tool_name="HassGetCurrentTime",
+            tool_result={
+                "speech": {"plain": {"speech": "12:00 PM", "extra_data": None}},
+                "response_type": "action_done",
+                "speech_slots": {"time": datetime.time(12, 0)},
+                "data": {"targets": [], "success": [], "failed": []},
+            },
+        )
+    )
+    mock_chat_log.async_add_assistant_content_without_tools(
+        conversation.AssistantContent(
+            agent_id="conversation.gpt_3_5_turbo",
+            content="12:00 PM",
+        )
+    )
+
     mock_chat_log.mock_tool_results(
         {
             "call_call_1": "value1",
@@ -95,34 +134,8 @@ async def test_function_call(
         }
     )
 
-    async def completion_result(*args, messages, **kwargs):
-        for message in messages:
-            role = message["role"] if isinstance(message, dict) else message.role
-            if role == "tool":
-                return ChatCompletion(
-                    id="chatcmpl-1234567890ZYXWVUTSRQPONMLKJIH",
-                    choices=[
-                        Choice(
-                            finish_reason="stop",
-                            index=0,
-                            message=ChatCompletionMessage(
-                                content="I have successfully called the function",
-                                role="assistant",
-                                function_call=None,
-                                tool_calls=None,
-                            ),
-                        )
-                    ],
-                    created=1700000000,
-                    model="gpt-4-1106-preview",
-                    object="chat.completion",
-                    system_fingerprint=None,
-                    usage=CompletionUsage(
-                        completion_tokens=9, prompt_tokens=8, total_tokens=17
-                    ),
-                )
-
-        return ChatCompletion(
+    mock_openai_client.chat.completions.create.side_effect = (
+        ChatCompletion(
             id="chatcmpl-1234567890ABCDEFGHIJKLMNOPQRS",
             choices=[
                 Choice(
@@ -152,9 +165,30 @@ async def test_function_call(
             usage=CompletionUsage(
                 completion_tokens=9, prompt_tokens=8, total_tokens=17
             ),
-        )
-
-    mock_openai_client.chat.completions.create = completion_result
+        ),
+        ChatCompletion(
+            id="chatcmpl-1234567890ZYXWVUTSRQPONMLKJIH",
+            choices=[
+                Choice(
+                    finish_reason="stop",
+                    index=0,
+                    message=ChatCompletionMessage(
+                        content="I have successfully called the function",
+                        role="assistant",
+                        function_call=None,
+                        tool_calls=None,
+                    ),
+                )
+            ],
+            created=1700000000,
+            model="gpt-4-1106-preview",
+            object="chat.completion",
+            system_fingerprint=None,
+            usage=CompletionUsage(
+                completion_tokens=9, prompt_tokens=8, total_tokens=17
+            ),
+        ),
+    )
 
     result = await conversation.async_converse(
         hass,
@@ -167,3 +201,8 @@ async def test_function_call(
     assert result.response.response_type == intent.IntentResponseType.ACTION_DONE
     # Don't test the prompt, as it's not deterministic
    assert mock_chat_log.content[1:] == snapshot
+    assert mock_openai_client.chat.completions.create.call_count == 2
+    assert (
+        mock_openai_client.chat.completions.create.call_args.kwargs["messages"]
+        == snapshot
+    )
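One detail of the test rewrite above: the hand-rolled completion_result callable is replaced by assigning an iterable to side_effect, so unittest.mock hands back the queued ChatCompletion objects in order, one per call. A small standalone sketch of that mechanism, with placeholder strings standing in for the ChatCompletion responses:

import asyncio
from unittest.mock import AsyncMock

# With an iterable side_effect, each awaited call returns the next item,
# mirroring how the test queues the tool-call reply and then the final answer.
mock_create = AsyncMock(side_effect=("first reply", "second reply"))


async def main() -> None:
    print(await mock_create())  # first reply
    print(await mock_create())  # second reply
    print(mock_create.call_count)  # 2


asyncio.run(main())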