Skip to content

Commit 3b98458

Browse files
authored
Merge pull request #777 from giles17/azure_openai_agent_features
Added Azure OpenAI Agent Features
2 parents 6a4c07f + 62e185e commit 3b98458

2 files changed

Lines changed: 376 additions & 0 deletions

File tree

agent-framework/user-guide/agents/agent-types/azure-openai-chat-completion-agent.md

Lines changed: 28 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -233,6 +233,34 @@ async def main():
233233
asyncio.run(main())
234234
```
235235

236+
### Using Threads for Context Management
237+
238+
Maintain conversation context across multiple interactions:
239+
240+
```python
241+
import asyncio
from agent_framework.azure import AzureOpenAIChatClient
from azure.identity import AzureCliCredential

async def main():
    """Preserve conversation context across multiple agent runs using a thread."""
    agent = AzureOpenAIChatClient(credential=AzureCliCredential()).create_agent(
        instructions="You are a helpful programming assistant."
    )

    # Create a new thread for conversation context
    thread = agent.get_new_thread()

    # First interaction
    result1 = await agent.run("I'm working on a Python web application.", thread=thread, store=True)
    print(f"Assistant: {result1.text}")

    # Second interaction - context is preserved
    result2 = await agent.run("What framework should I use?", thread=thread, store=True)
    print(f"Assistant: {result2.text}")

asyncio.run(main())
262+
```
263+
236264
### Streaming Responses
237265

238266
Get responses as they are generated using streaming:

agent-framework/user-guide/agents/agent-types/azure-openai-responses-agent.md

Lines changed: 348 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -159,6 +159,63 @@ asyncio.run(main())
159159

160160
## Agent Features
161161

162+
### Reasoning Models
163+
164+
Azure OpenAI Responses agents support advanced reasoning models like o1 for complex problem-solving:
165+
166+
```python
167+
import asyncio
from agent_framework.azure import AzureOpenAIResponsesClient
from azure.identity import AzureCliCredential

async def main():
    """Run a reasoning-model-backed agent on a multi-step logic problem."""
    agent = AzureOpenAIResponsesClient(
        deployment_name="o1-preview",  # Use reasoning model
        credential=AzureCliCredential()
    ).create_agent(
        instructions="You are a helpful assistant that excels at complex reasoning.",
        name="ReasoningAgent"
    )

    result = await agent.run("Solve this logic puzzle: If A > B, B > C, and C > D, and we know D = 5, B = 10, what can we determine about A?")
    print(result.text)

asyncio.run(main())
184+
```
185+
186+
### Structured Output
187+
188+
Get structured responses from Azure OpenAI Responses agents:
189+
190+
```python
191+
import asyncio
from typing import Annotated
from agent_framework.azure import AzureOpenAIResponsesClient
from azure.identity import AzureCliCredential
from pydantic import BaseModel, Field

class WeatherForecast(BaseModel):
    """Schema the agent must follow when answering weather questions."""
    location: Annotated[str, Field(description="The location")]
    temperature: Annotated[int, Field(description="Temperature in Celsius")]
    condition: Annotated[str, Field(description="Weather condition")]
    humidity: Annotated[int, Field(description="Humidity percentage")]

async def main():
    """Request a structured WeatherForecast object instead of free-form text."""
    agent = AzureOpenAIResponsesClient(credential=AzureCliCredential()).create_agent(
        instructions="You are a weather assistant that provides structured forecasts.",
        response_format=WeatherForecast
    )

    result = await agent.run("What's the weather like in Paris today?")
    # result.value is an instance of WeatherForecast when response_format is set
    weather_data = result.value
    print(f"Location: {weather_data.location}")
    print(f"Temperature: {weather_data.temperature}°C")
    print(f"Condition: {weather_data.condition}")
    print(f"Humidity: {weather_data.humidity}%")

asyncio.run(main())
217+
```
218+
162219
### Function Tools
163220

164221
You can provide custom function tools to Azure OpenAI Responses agents:
@@ -210,6 +267,297 @@ async def main():
210267
asyncio.run(main())
211268
```
212269

270+
#### Code Interpreter with File Upload
271+
272+
For data analysis tasks, you can upload files and analyze them with code:
273+
274+
```python
275+
import asyncio
import os
import tempfile
from agent_framework import ChatAgent, HostedCodeInterpreterTool
from agent_framework.azure import AzureOpenAIResponsesClient
from azure.identity import AzureCliCredential
from openai import AsyncAzureOpenAI

async def create_sample_file_and_upload(openai_client: AsyncAzureOpenAI) -> tuple[str, str]:
    """Create a sample CSV file and upload it to Azure OpenAI.

    Returns a tuple of (local temp file path, uploaded file ID).
    """
    csv_data = """name,department,salary,years_experience
Alice Johnson,Engineering,95000,5
Bob Smith,Sales,75000,3
Carol Williams,Engineering,105000,8
David Brown,Marketing,68000,2
Emma Davis,Sales,82000,4
Frank Wilson,Engineering,88000,6
"""

    # Create temporary CSV file (delete=False so it survives the `with` block;
    # it is removed explicitly in cleanup_files)
    with tempfile.NamedTemporaryFile(mode="w", suffix=".csv", delete=False) as temp_file:
        temp_file.write(csv_data)
        temp_file_path = temp_file.name

    # Upload file to Azure OpenAI
    print("Uploading file to Azure OpenAI...")
    with open(temp_file_path, "rb") as file:
        uploaded_file = await openai_client.files.create(
            file=file,
            purpose="assistants",  # Required for code interpreter
        )

    print(f"File uploaded with ID: {uploaded_file.id}")
    return temp_file_path, uploaded_file.id

async def cleanup_files(openai_client: AsyncAzureOpenAI, temp_file_path: str, file_id: str) -> None:
    """Clean up both local temporary file and uploaded file."""
    # Clean up: delete the uploaded file
    await openai_client.files.delete(file_id)
    print(f"Cleaned up uploaded file: {file_id}")

    # Clean up temporary local file
    os.unlink(temp_file_path)
    print(f"Cleaned up temporary file: {temp_file_path}")

async def main():
    print("=== Azure OpenAI Code Interpreter with File Upload ===")

    # Initialize Azure OpenAI client for file operations
    credential = AzureCliCredential()

    async def get_token():
        token = credential.get_token("https://cognitiveservices.azure.com/.default")
        return token.token

    openai_client = AsyncAzureOpenAI(
        azure_ad_token_provider=get_token,
        api_version="2024-05-01-preview",
    )

    temp_file_path, file_id = await create_sample_file_and_upload(openai_client)

    try:
        # Create agent using Azure OpenAI Responses client
        async with ChatAgent(
            chat_client=AzureOpenAIResponsesClient(credential=credential),
            instructions="You are a helpful assistant that can analyze data files using Python code.",
            tools=HostedCodeInterpreterTool(inputs=[{"file_id": file_id}]),
        ) as agent:
            # Test the code interpreter with the uploaded file
            query = "Analyze the employee data in the uploaded CSV file. Calculate average salary by department."
            print(f"User: {query}")
            result = await agent.run(query)
            print(f"Agent: {result.text}")
    finally:
        # Always delete the uploaded and temporary files, even if the run fails,
        # so failed runs do not leak files in the Azure OpenAI resource or on disk
        await cleanup_files(openai_client, temp_file_path, file_id)

asyncio.run(main())
352+
```
353+
354+
### File Search
355+
356+
Enable your agent to search through uploaded documents and files:
357+
358+
```python
359+
import asyncio
from agent_framework import ChatAgent, HostedFileSearchTool, HostedVectorStoreContent
from agent_framework.azure import AzureOpenAIResponsesClient
from azure.identity import AzureCliCredential

async def create_vector_store(client: AzureOpenAIResponsesClient) -> tuple[str, HostedVectorStoreContent]:
    """Create a vector store with sample documents.

    Returns a tuple of (uploaded file ID, HostedVectorStoreContent for the store).
    """
    file = await client.client.files.create(
        file=("todays_weather.txt", b"The weather today is sunny with a high of 75F."),
        purpose="assistants"
    )
    vector_store = await client.client.vector_stores.create(
        name="knowledge_base",
        # Expire the store a day after last use so abandoned stores are reclaimed
        expires_after={"anchor": "last_active_at", "days": 1},
    )
    result = await client.client.vector_stores.files.create_and_poll(
        vector_store_id=vector_store.id,
        file_id=file.id
    )
    if result.last_error is not None:
        raise Exception(f"Vector store file processing failed with status: {result.last_error.message}")

    return file.id, HostedVectorStoreContent(vector_store_id=vector_store.id)

async def delete_vector_store(client: AzureOpenAIResponsesClient, file_id: str, vector_store_id: str) -> None:
    """Delete the vector store after using it."""
    await client.client.vector_stores.delete(vector_store_id=vector_store_id)
    await client.client.files.delete(file_id=file_id)

async def main():
    print("=== Azure OpenAI Responses Client with File Search Example ===\n")

    # Initialize Responses client
    client = AzureOpenAIResponsesClient(credential=AzureCliCredential())

    file_id, vector_store = await create_vector_store(client)

    try:
        async with ChatAgent(
            chat_client=client,
            instructions="You are a helpful assistant that can search through files to find information.",
            tools=[HostedFileSearchTool(inputs=vector_store)],
        ) as agent:
            query = "What is the weather today? Do a file search to find the answer."
            print(f"User: {query}")
            result = await agent.run(query)
            print(f"Agent: {result}\n")
    finally:
        # Always delete the vector store and file, even if the agent run fails,
        # so failed runs do not leak resources in the Azure OpenAI resource
        await delete_vector_store(client, file_id, vector_store.vector_store_id)

asyncio.run(main())
409+
```
410+
411+
### Model Context Protocol (MCP) Tools
412+
413+
#### Local MCP Tools
414+
415+
Connect to local MCP servers for extended capabilities:
416+
417+
```python
418+
import asyncio
from agent_framework import ChatAgent, MCPStreamableHTTPTool
from agent_framework.azure import AzureOpenAIResponsesClient
from azure.identity import AzureCliCredential

async def main():
    """Example showing local MCP tools for Azure OpenAI Responses Agent."""
    # Create Azure OpenAI Responses client
    responses_client = AzureOpenAIResponsesClient(credential=AzureCliCredential())

    # Create agent
    agent = responses_client.create_agent(
        name="DocsAgent",
        instructions="You are a helpful assistant that can help with Microsoft documentation questions.",
    )

    # Connect to the MCP server (Streamable HTTP)
    async with MCPStreamableHTTPTool(
        name="Microsoft Learn MCP",
        url="https://learn.microsoft.com/api/mcp",
    ) as mcp_tool:
        # First query — expect the agent to use the MCP tool if it helps
        first_query = "How to create an Azure storage account using az cli?"
        first_result = await agent.run(first_query, tools=mcp_tool)
        print("\n=== Answer 1 ===\n", first_result.text)

        # Follow-up query (connection is reused)
        second_query = "What is Microsoft Agent Framework?"
        second_result = await agent.run(second_query, tools=mcp_tool)
        print("\n=== Answer 2 ===\n", second_result.text)

asyncio.run(main())
450+
```
451+
452+
#### Hosted MCP Tools
453+
454+
Use hosted MCP tools and control how tool-call approvals are handled (this example auto-approves all calls):
455+
456+
```python
457+
import asyncio
from agent_framework import ChatAgent, HostedMCPTool
from agent_framework.azure import AzureOpenAIResponsesClient
from azure.identity import AzureCliCredential

async def main():
    """Example showing hosted MCP tools with auto-approved tool calls."""
    credential = AzureCliCredential()

    async with ChatAgent(
        chat_client=AzureOpenAIResponsesClient(credential=credential),
        name="DocsAgent",
        instructions="You are a helpful assistant that can help with microsoft documentation questions.",
        tools=HostedMCPTool(
            name="Microsoft Learn MCP",
            url="https://learn.microsoft.com/api/mcp",
            # Auto-approve all function calls for seamless experience
            approval_mode="never_require",
        ),
    ) as agent:
        # First query
        first_query = "How to create an Azure storage account using az cli?"
        print(f"User: {first_query}")
        first_result = await agent.run(first_query)
        print(f"Agent: {first_result.text}\n")

        print("\n=======================================\n")

        # Second query
        second_query = "What is Microsoft Agent Framework?"
        print(f"User: {second_query}")
        second_result = await agent.run(second_query)
        print(f"Agent: {second_result.text}\n")

asyncio.run(main())
492+
```
493+
494+
### Image Analysis
495+
496+
Azure OpenAI Responses agents support multimodal interactions including image analysis:
497+
498+
```python
499+
import asyncio
from agent_framework import ChatMessage, TextContent, UriContent
from agent_framework.azure import AzureOpenAIResponsesClient
from azure.identity import AzureCliCredential

async def main():
    """Send a multimodal (text + image) message to a Responses agent."""
    print("=== Azure Responses Agent with Image Analysis ===")

    # Create an Azure Responses agent with vision capabilities
    agent = AzureOpenAIResponsesClient(credential=AzureCliCredential()).create_agent(
        name="VisionAgent",
        instructions="You are a helpful agent that can analyze images.",
    )

    # Create a message with both text and image content
    user_message = ChatMessage(
        role="user",
        contents=[
            TextContent(text="What do you see in this image?"),
            UriContent(
                uri="https://upload.wikimedia.org/wikipedia/commons/thumb/d/dd/Gfp-wisconsin-madison-the-nature-boardwalk.jpg/2560px-Gfp-wisconsin-madison-the-nature-boardwalk.jpg",
                media_type="image/jpeg",
            ),
        ],
    )

    # Get the agent's response
    print("User: What do you see in this image? [Image provided]")
    result = await agent.run(user_message)
    print(f"Agent: {result.text}")

asyncio.run(main())
531+
```
532+
533+
### Using Threads for Context Management
534+
535+
Maintain conversation context across multiple interactions:
536+
537+
```python
538+
import asyncio
from agent_framework.azure import AzureOpenAIResponsesClient
from azure.identity import AzureCliCredential

async def main():
    """Preserve conversation context across multiple agent runs using a thread."""
    agent = AzureOpenAIResponsesClient(credential=AzureCliCredential()).create_agent(
        instructions="You are a helpful programming assistant."
    )

    # Create a new thread for conversation context
    thread = agent.get_new_thread()

    # First interaction
    result1 = await agent.run("I'm working on a Python web application.", thread=thread, store=True)
    print(f"Assistant: {result1.text}")

    # Second interaction - context is preserved
    result2 = await agent.run("What framework should I use?", thread=thread, store=True)
    print(f"Assistant: {result2.text}")

asyncio.run(main())
559+
```
560+
213561
### Streaming Responses
214562

215563
Get responses as they are generated using streaming:

0 commit comments

Comments
 (0)