Commit 0ff97c7

Python: add AzureAIAgent, OpenAIAssistant and OpenAIResponsesAgent Integration tests, AzureAIAgent FCC gen bug fix (#11394)
### Motivation and Context

AzureAIAgent, OpenAIAssistantAgent, and OpenAIResponsesAgent were previously not covered by integration tests. This change adds a set of integration tests covering high-level usage of an AzureAIAgent and an OpenAIAssistantAgent via `get_response`, `invoke`, and `invoke_stream`. While adding these tests, we found that a previous typing fix had caused a regression when generating FunctionCallContent for an AzureAIAgent; this is fixed by using the correct instance check.

### Description

Adds integration tests for AzureAIAgent and OpenAIAssistantAgent.

- Fixes #11393
- Bumps the version to 1.27.2 for a bug-fix release.
- Removes the unnecessary yield of function result content during streaming invocation; these messages are still delivered when the `on_intermediate_message` callback is provided (see the sketch after the streaming diffs below).

### Contribution Checklist

- [X] The code builds clean without any errors or warnings
- [X] The PR follows the [SK Contribution Guidelines](https://github.com/microsoft/semantic-kernel/blob/main/CONTRIBUTING.md) and the [pre-submission formatting script](https://github.com/microsoft/semantic-kernel/blob/main/CONTRIBUTING.md#development-scripts) raises no violations
- [X] All unit tests pass, and I have added new tests where possible
- [X] I didn't break anyone 😄
1 parent 20b3c66 commit 0ff97c7

File tree

11 files changed: +1025 -16 lines

.github/workflows/python-integration-tests.yml (+12 -1)

@@ -32,7 +32,11 @@ env:
   AZURE_OPENAI_ENDPOINT: ${{ secrets.AZURE_OPENAI_ENDPOINT }}
   AZURE_OPENAI_AUDIO_TO_TEXT_ENDPOINT: ${{ secrets.AZURE_OPENAI_AUDIO_TO_TEXT_ENDPOINT }}
   AZURE_OPENAI_TEXT_TO_AUDIO_ENDPOINT: ${{ secrets.AZURE_OPENAI_TEXT_TO_AUDIO_ENDPOINT }}
+  AZURE_AI_AGENT_PROJECT_CONNECTION_STRING: ${{ secrets.AZURE_AI_AGENT_PROJECT_CONNECTION_STRING }}
+  AZURE_AI_AGENT_MODEL_DEPLOYMENT_NAME: ${{ secrets.AZURE_AI_AGENT_MODEL_DEPLOYMENT_NAME }}
+  AZURE_OPENAI_RESPONSES_DEPLOYMENT_NAME: ${{ vars.AZURE_OPENAI_RESPONSES_DEPLOYMENT_NAME }}
   BING_API_KEY: ${{ secrets.BING_API_KEY }}
+  OPENAI_RESPONSES_MODEL_ID: ${{ vars.OPENAI_RESPONSES_MODEL_ID }}
   OPENAI_CHAT_MODEL_ID: ${{ vars.OPENAI_CHAT_MODEL_ID }}
   OPENAI_TEXT_MODEL_ID: ${{ vars.OPENAI_TEXT_MODEL_ID }}
   OPENAI_EMBEDDING_MODEL_ID: ${{ vars.OPENAI_EMBEDDING_MODEL_ID }}
@@ -225,6 +229,13 @@ jobs:
           aws-access-key-id: ${{ secrets.AWS_ACCESS_KEY_ID }}
           aws-secret-access-key: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
           aws-region: ${{ vars.AWS_REGION }}
+      - name: Azure CLI Login
+        if: github.event_name != 'pull_request'
+        uses: azure/login@v2
+        with:
+          client-id: ${{ secrets.AZURE_CLIENT_ID }}
+          tenant-id: ${{ secrets.AZURE_TENANT_ID }}
+          subscription-id: ${{ secrets.AZURE_SUBSCRIPTION_ID }}
       - name: Run Integration Tests
         id: run_tests_agents
         shell: bash
@@ -467,7 +478,7 @@ jobs:
           uv run pytest -v -n logical --dist loadfile --dist worksteal ./tests/samples
       - name: Run Integration Tests - Agents
         id: run_tests_agents
-        timeout-minutes: 5
+        timeout-minutes: 10
         shell: bash
         run: |
           uv run pytest -v -n logical --dist loadfile --dist worksteal ./tests/integration/agents

python/semantic_kernel/__init__.py (+1 -1)

@@ -2,7 +2,7 @@

 from semantic_kernel.kernel import Kernel

-__version__ = "1.27.1"
+__version__ = "1.27.2"

 DEFAULT_RC_VERSION = f"{__version__}-rc6"

python/semantic_kernel/agents/azure_ai/agent_content_generation.py (+2 -1)

@@ -14,6 +14,7 @@
     MessageTextFileCitationAnnotation,
     MessageTextFilePathAnnotation,
     MessageTextUrlCitationAnnotation,
+    RequiredFunctionToolCall,
     RunStep,
     RunStepDeltaCodeInterpreterDetailItemObject,
     RunStepDeltaCodeInterpreterImageOutput,
@@ -214,7 +215,7 @@ def get_function_call_contents(
     if not isinstance(tool_calls, (list, tuple)):
         return function_call_contents
     for tool_call in tool_calls:
-        if not isinstance(tool_call, RunStepFunctionToolCall):
+        if not isinstance(tool_call, RequiredFunctionToolCall):
             continue
         fcc = FunctionCallContent(
             id=tool_call.id,

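To make the regression concrete: the tool calls surfaced for a run's required action are `RequiredFunctionToolCall` instances, so filtering on `RunStepFunctionToolCall` skipped every one of them and no `FunctionCallContent` was ever produced. A stripped-down sketch of the corrected filter, assuming the usual `tool_call.function.name` / `tool_call.function.arguments` layout from the azure-ai-projects models (the helper name and fields beyond `tool_call.id` are not shown in this diff):

```python
from azure.ai.projects.models import RequiredFunctionToolCall

from semantic_kernel.contents.function_call_content import FunctionCallContent


def to_function_call_contents(tool_calls: list) -> list[FunctionCallContent]:
    """Illustrative version of the corrected instance check (not the library code)."""
    contents: list[FunctionCallContent] = []
    for tool_call in tool_calls:
        # The old check against RunStepFunctionToolCall never matched here,
        # so every required tool call was silently dropped.
        if not isinstance(tool_call, RequiredFunctionToolCall):
            continue
        contents.append(
            FunctionCallContent(
                id=tool_call.id,
                name=tool_call.function.name,  # assumed field layout
                arguments=tool_call.function.arguments,  # assumed field layout
            )
        )
    return contents
```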
python/semantic_kernel/agents/azure_ai/agent_thread_actions.py (+2 -4)

@@ -491,10 +491,8 @@ async def _process_stream_events(
                 if sub_content:
                     yield sub_content

-                if action_result.function_result_streaming_content:
-                    yield action_result.function_result_streaming_content
-                    if output_messages is not None:
-                        output_messages.append(action_result.function_result_streaming_content)
+                if action_result.function_result_streaming_content and output_messages is not None:
+                    output_messages.append(action_result.function_result_streaming_content)

                 break

python/semantic_kernel/agents/open_ai/assistant_thread_actions.py (+3 -6)

@@ -492,12 +492,9 @@ async def invoke_stream(
                     thread_id=thread_id,
                     tool_outputs=function_action_result.tool_outputs,  # type: ignore
                 )
-                if function_action_result.function_result_streaming_content:
-                    # Yield the function result content to the caller
-                    yield function_action_result.function_result_streaming_content
-                    if output_messages is not None:
-                        # Add the function result content to the messages list, if it exists
-                        output_messages.append(function_action_result.function_result_streaming_content)
+                if function_action_result.function_result_streaming_content and output_messages is not None:
+                    # Add the function result content to the messages list, if it exists
+                    output_messages.append(function_action_result.function_result_streaming_content)
                 break
             elif event.event == "thread.run.completed":
                 run = event.data

python/semantic_kernel/agents/open_ai/responses_agent_thread_actions.py (-1)

@@ -459,7 +459,6 @@ async def invoke_stream(
                     msg = function_result_messages[0]
                     if output_messages is not None:
                         output_messages.append(msg)
-                    yield msg  # Always yield the first message if eligible

                 if any(result.terminate for result in results if result is not None):
                     break  # Only break if any result has terminate=True
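With these three streaming changes, `invoke_stream` no longer yields function call or function result content inline; callers that want those intermediate messages supply the `on_intermediate_message` callback referenced in the commit message. A minimal sketch, assuming the callback takes a `ChatMessageContent` and that `invoke_stream` accepts the parameter as in recent Semantic Kernel releases (the `agent`, `handle_intermediate`, and `stream_answer` names here are illustrative, not part of this commit):

```python
from semantic_kernel.contents.chat_message_content import ChatMessageContent


async def handle_intermediate(message: ChatMessageContent) -> None:
    # Function call / function result items arrive through this callback
    # instead of being yielded in the main response stream.
    print(f"[intermediate] {message.role}: {message.items}")


async def stream_answer(agent, question: str) -> str:
    # `agent` is any AzureAIAgent / OpenAIAssistantAgent / OpenAIResponsesAgent instance.
    full_text = ""
    async for response in agent.invoke_stream(
        messages=question,
        on_intermediate_message=handle_intermediate,  # parameter name from the commit message
    ):
        full_text += response.message.content
    return full_text
```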
New file (all 263 lines added): AzureAIAgent integration tests

@@ -0,0 +1,263 @@
# Copyright (c) Microsoft. All rights reserved.

import os
from typing import Annotated

import pytest
from azure.ai.projects.models import CodeInterpreterTool, FileSearchTool
from azure.identity.aio import DefaultAzureCredential

from semantic_kernel.agents import AzureAIAgent, AzureAIAgentSettings
from semantic_kernel.contents.chat_message_content import ChatMessageContent
from semantic_kernel.contents.streaming_chat_message_content import StreamingChatMessageContent
from semantic_kernel.contents.utils.author_role import AuthorRole
from semantic_kernel.functions.kernel_function_decorator import kernel_function


class WeatherPlugin:
    """Mock weather plugin."""

    @kernel_function(description="Get real-time weather information.")
    def current_weather(self, location: Annotated[str, "The location to get the weather"]) -> str:
        """Returns the current weather."""
        return f"The weather in {location} is sunny."


class TestAzureAIAgentIntegration:
    @pytest.fixture
    async def azureai_agent(self, request):
        ai_agent_settings = AzureAIAgentSettings.create()
        async with (
            DefaultAzureCredential() as creds,
            AzureAIAgent.create_client(credential=creds) as client,
        ):
            tools, tool_resources, plugins = [], {}, []

            params = getattr(request, "param", {})
            if params.get("enable_code_interpreter"):
                ci_tool = CodeInterpreterTool()
                tools.extend(ci_tool.definitions)
                tool_resources.update(ci_tool.resources)

            if params.get("enable_file_search"):
                pdf_file_path = os.path.join(
                    os.path.dirname(os.path.dirname(os.path.realpath(__file__))), "resources", "employees.pdf"
                )
                file = await client.agents.upload_file_and_poll(file_path=pdf_file_path, purpose="assistants")
                vector_store = await client.agents.create_vector_store_and_poll(
                    file_ids=[file.id], name="my_vectorstore"
                )
                fs_tool = FileSearchTool(vector_store_ids=[vector_store.id])
                tools.extend(fs_tool.definitions)
                tool_resources.update(fs_tool.resources)

            if params.get("enable_kernel_function"):
                plugins.append(WeatherPlugin())

            agent_definition = await client.agents.create_agent(
                model=ai_agent_settings.model_deployment_name,
                tools=tools,
                tool_resources=tool_resources,
                name="SKPythonIntegrationTestAgent",
                instructions="You are a helpful assistant that help users with their questions.",
            )

            azureai_agent = AzureAIAgent(
                client=client,
                definition=agent_definition,
                plugins=plugins,
            )

            yield azureai_agent  # yield agent for test method to use

            # cleanup
            await azureai_agent.client.agents.delete_agent(azureai_agent.id)

    async def test_get_response(self, azureai_agent: AzureAIAgent):
        """Test get response of the agent."""
        response = await azureai_agent.get_response(messages="Hello")
        assert isinstance(response.message, ChatMessageContent)
        assert response.message.role == AuthorRole.ASSISTANT
        assert response.message.content is not None

    async def test_get_response_with_thread(self, azureai_agent: AzureAIAgent):
        """Test get response of the agent with a thread."""
        thread = None
        user_messages = ["Hello, I am John Doe.", "What is my name?"]
        for user_message in user_messages:
            response = await azureai_agent.get_response(messages=user_message, thread=thread)
            thread = response.thread
            assert thread is not None
            assert isinstance(response.message, ChatMessageContent)
            assert response.message.role == AuthorRole.ASSISTANT
            assert response.message.content is not None
        await thread.delete() if thread else None

    async def test_invoke(self, azureai_agent: AzureAIAgent):
        """Test invoke of the agent."""
        async for response in azureai_agent.invoke(messages="Hello"):
            assert isinstance(response.message, ChatMessageContent)
            assert response.message.role == AuthorRole.ASSISTANT
            assert response.message.content is not None

    async def test_invoke_with_thread(self, azureai_agent: AzureAIAgent):
        """Test invoke of the agent with a thread."""
        thread = None
        user_messages = ["Hello, I am John Doe.", "What is my name?"]
        for user_message in user_messages:
            async for response in azureai_agent.invoke(messages=user_message, thread=thread):
                thread = response.thread
                assert thread is not None
                assert isinstance(response.message, ChatMessageContent)
                assert response.message.role == AuthorRole.ASSISTANT
                assert response.message.content is not None
        await thread.delete() if thread else None

    async def test_invoke_stream(self, azureai_agent: AzureAIAgent):
        """Test invoke stream of the agent."""
        async for response in azureai_agent.invoke_stream(messages="Hello"):
            assert isinstance(response.message, StreamingChatMessageContent)
            assert response.message.role == AuthorRole.ASSISTANT
            assert response.message.content is not None

    @pytest.mark.parametrize("azureai_agent", [{"enable_code_interpreter": True}], indirect=True)
    async def test_invoke_stream_with_thread(self, azureai_agent: AzureAIAgent):
        """Test invoke stream of the agent with a thread."""
        thread = None
        user_messages = ["Hello, I am John Doe.", "What is my name?"]
        for user_message in user_messages:
            async for response in azureai_agent.invoke_stream(messages=user_message, thread=thread):
                thread = response.thread
                assert thread is not None
                assert isinstance(response.message, StreamingChatMessageContent)
                assert response.message.role == AuthorRole.ASSISTANT
                assert response.message.content is not None
        await thread.delete() if thread else None

    @pytest.mark.parametrize("azureai_agent", [{"enable_code_interpreter": True}], indirect=True)
    async def test_code_interpreter_get_response(self, azureai_agent: AzureAIAgent):
        """Test code interpreter."""
        input_text = """
        Create a bar chart for the following data:
        Panda 5
        Tiger 8
        Lion 3
        Monkey 6
        Dolphin 2
        """
        response = await azureai_agent.get_response(messages=input_text)
        assert isinstance(response.message, ChatMessageContent)
        assert response.message.role == AuthorRole.ASSISTANT
        assert response.message.content is not None

    @pytest.mark.parametrize("azureai_agent", [{"enable_code_interpreter": True}], indirect=True)
    async def test_code_interpreter_invoke(self, azureai_agent: AzureAIAgent):
        """Test code interpreter."""
        input_text = """
        Create a bar chart for the following data:
        Panda 5
        Tiger 8
        Lion 3
        Monkey 6
        Dolphin 2
        """
        async for response in azureai_agent.invoke(messages=input_text):
            assert isinstance(response.message, ChatMessageContent)
            assert response.message.role == AuthorRole.ASSISTANT
            assert response.message.content is not None

    @pytest.mark.parametrize("azureai_agent", [{"enable_code_interpreter": True}], indirect=True)
    async def test_code_interpreter_invoke_stream(self, azureai_agent: AzureAIAgent):
        """Test code interpreter streaming."""
        input_text = """
        Create a bar chart for the following data:
        Panda 5
        Tiger 8
        Lion 3
        Monkey 6
        Dolphin 2
        """
        async for response in azureai_agent.invoke_stream(messages=input_text):
            assert isinstance(response.message, StreamingChatMessageContent)
            assert response.message.role == AuthorRole.ASSISTANT
            assert response.message.content is not None

    @pytest.mark.parametrize("azureai_agent", [{"enable_file_search": True}], indirect=True)
    async def test_file_search_get_response(self, azureai_agent: AzureAIAgent):
        """Test code interpreter."""
        input_text = "Who is the youngest employee?"
        response = await azureai_agent.get_response(messages=input_text)
        assert isinstance(response.message, ChatMessageContent)
        assert response.message.role == AuthorRole.ASSISTANT

    @pytest.mark.parametrize("azureai_agent", [{"enable_file_search": True}], indirect=True)
    async def test_file_search_invoke(self, azureai_agent: AzureAIAgent):
        """Test code interpreter."""
        input_text = "Who is the youngest employee?"
        async for response in azureai_agent.invoke(messages=input_text):
            assert isinstance(response.message, ChatMessageContent)
            assert response.message.role == AuthorRole.ASSISTANT

    @pytest.mark.parametrize("azureai_agent", [{"enable_file_search": True}], indirect=True)
    async def test_file_search_invoke_stream(self, azureai_agent: AzureAIAgent):
        """Test code interpreter streaming."""
        input_text = "Who is the youngest employee?"
        async for response in azureai_agent.invoke_stream(messages=input_text):
            assert isinstance(response.message, StreamingChatMessageContent)
            assert response.message.role == AuthorRole.ASSISTANT

    @pytest.mark.parametrize(
        "azureai_agent",
        [
            {
                "enable_kernel_function": True,
            },
        ],
        indirect=True,
    )
    async def test_function_calling_get_response(self, azureai_agent: AzureAIAgent):
        """Test function calling."""
        response = await azureai_agent.get_response(
            messages="What is the weather in Seattle?",
        )
        assert isinstance(response.message, ChatMessageContent)
        assert response.message.role == AuthorRole.ASSISTANT
        assert "sunny" in response.message.content

    @pytest.mark.parametrize(
        "azureai_agent",
        [
            {
                "enable_kernel_function": True,
            },
        ],
        indirect=True,
    )
    async def test_function_calling_invoke(self, azureai_agent: AzureAIAgent):
        """Test function calling."""
        async for response in azureai_agent.invoke(
            messages="What is the weather in Seattle?",
        ):
            assert isinstance(response.message, ChatMessageContent)
            assert response.message.role == AuthorRole.ASSISTANT
            assert "sunny" in response.message.content

    @pytest.mark.parametrize(
        "azureai_agent",
        [
            {
                "enable_kernel_function": True,
            },
        ],
        indirect=True,
    )
    async def test_function_calling_stream(self, azureai_agent: AzureAIAgent):
        """Test function calling streaming."""
        full_message: str = ""
        async for response in azureai_agent.invoke_stream(
            messages="What is the weather in Seattle?",
        ):
            assert isinstance(response.message, StreamingChatMessageContent)
            assert response.message.role == AuthorRole.ASSISTANT
            full_message += response.message.content
        assert "sunny" in full_message

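For anyone running these integration tests locally, the fixture's `AzureAIAgentSettings.create()` call needs the same values that the workflow now injects as secrets. A small sketch, assuming the settings class reads the `AZURE_AI_AGENT_*` environment variables shown in the workflow diff above (the placeholder values are hypothetical):

```python
import os

from semantic_kernel.agents import AzureAIAgentSettings

# Placeholders; in CI these come from the repository secrets added in the workflow above.
os.environ["AZURE_AI_AGENT_PROJECT_CONNECTION_STRING"] = "<project-connection-string>"
os.environ["AZURE_AI_AGENT_MODEL_DEPLOYMENT_NAME"] = "<model-deployment-name>"

settings = AzureAIAgentSettings.create()
print(settings.model_deployment_name)  # the fixture passes this to client.agents.create_agent
```

Authentication mirrors the new Azure CLI Login step: `DefaultAzureCredential` in the fixture picks up the workflow's `azure/login` credential in CI, or an existing `az login` session locally.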