Skip to content

Commit 75ee862

Browse files
authored
Merge pull request #768 from giles17/openai_agent_features
Added OpenAI Agent Documentation for Agent Features
2 parents 766617a + 0a0b665 commit 75ee862

3 files changed

Lines changed: 425 additions & 11 deletions

File tree

agent-framework/user-guide/agents/agent-types/openai-assistants-agent.md

Lines changed: 112 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -242,6 +242,118 @@ async def code_interpreter_example():
242242
print(result.text)
243243
```
244244

### File Search

Enable your assistant to search through uploaded documents:

```python
from agent_framework import HostedFileSearchTool, HostedVectorStoreContent

async def create_vector_store(client: OpenAIAssistantsClient) -> tuple[str, HostedVectorStoreContent]:
    """Create a vector store with sample documents."""
    file = await client.client.files.create(
        file=("todays_weather.txt", b"The weather today is sunny with a high of 75F."),
        purpose="user_data"
    )
    vector_store = await client.client.vector_stores.create(
        name="knowledge_base",
        expires_after={"anchor": "last_active_at", "days": 1},
    )
    result = await client.client.vector_stores.files.create_and_poll(
        vector_store_id=vector_store.id,
        file_id=file.id
    )
    if result.last_error is not None:
        raise Exception(f"Vector store file processing failed with status: {result.last_error.message}")

    return file.id, HostedVectorStoreContent(vector_store_id=vector_store.id)

async def delete_vector_store(client: OpenAIAssistantsClient, file_id: str, vector_store_id: str) -> None:
    """Delete the vector store after using it."""
    await client.client.vector_stores.delete(vector_store_id=vector_store_id)
    await client.client.files.delete(file_id=file_id)

async def file_search_example():
    print("=== OpenAI Assistants Client Agent with File Search Example ===\n")

    client = OpenAIAssistantsClient()
    async with ChatAgent(
        chat_client=client,
        instructions="You are a helpful assistant that searches files in a knowledge base.",
        tools=HostedFileSearchTool(),
    ) as agent:
        query = "What is the weather today? Do a file search to find the answer."
        file_id, vector_store = await create_vector_store(client)

        print(f"User: {query}")
        print("Agent: ", end="", flush=True)
        async for chunk in agent.run_stream(
            query, tool_resources={"file_search": {"vector_store_ids": [vector_store.vector_store_id]}}
        ):
            if chunk.text:
                print(chunk.text, end="", flush=True)
        print()  # New line after streaming

        await delete_vector_store(client, file_id, vector_store.vector_store_id)
```
### Thread Management

Maintain conversation context across multiple interactions:

```python
async def thread_example():
    async with OpenAIAssistantsClient().create_agent(
        name="Assistant",
        instructions="You are a helpful assistant.",
    ) as agent:
        # Create a persistent thread for conversation context
        thread = agent.get_new_thread()

        # First interaction
        first_query = "My name is Alice"
        print(f"User: {first_query}")
        first_result = await agent.run(first_query, thread=thread)
        print(f"Agent: {first_result.text}")

        # Second interaction - agent remembers the context
        second_query = "What's my name?"
        print(f"User: {second_query}")
        second_result = await agent.run(second_query, thread=thread)
        print(f"Agent: {second_result.text}")  # Should remember "Alice"
```
### Working with Existing Assistants

You can reuse existing OpenAI assistants by providing their IDs:

```python
from openai import AsyncOpenAI

async def existing_assistant_example():
    # Create OpenAI client directly
    client = AsyncOpenAI()

    # Create or get an existing assistant
    assistant = await client.beta.assistants.create(
        model="gpt-4o-mini",
        name="WeatherAssistant",
        instructions="You are a weather forecasting assistant."
    )

    try:
        # Use the existing assistant with Agent Framework
        async with OpenAIAssistantsClient(
            async_client=client,
            assistant_id=assistant.id
        ).create_agent() as agent:
            result = await agent.run("What's the weather like in Seattle?")
            print(result.text)
    finally:
        # Clean up the assistant
        await client.beta.assistants.delete(assistant.id)
```
356+
245357
### Streaming Responses
246358

247359
Get responses as they are generated for better user experience:

agent-framework/user-guide/agents/agent-types/openai-chat-completion-agent.md

Lines changed: 68 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -162,6 +162,72 @@ async def tools_example():
162162
print(result.text)
163163
```
164164

### Web Search

Enable real-time web search capabilities:

```python
from agent_framework import HostedWebSearchTool

async def web_search_example():
    agent = OpenAIChatClient(model_id="gpt-4o-search-preview").create_agent(
        name="SearchBot",
        instructions="You are a helpful assistant that can search the web for current information.",
        tools=HostedWebSearchTool(),
    )

    result = await agent.run("What are the latest developments in artificial intelligence?")
    print(result.text)
```
### Model Context Protocol (MCP) Tools

Connect to local MCP servers for extended capabilities:

```python
from agent_framework import MCPStreamableHTTPTool

async def local_mcp_example():
    agent = OpenAIChatClient().create_agent(
        name="DocsAgent",
        instructions="You are a helpful assistant that can help with Microsoft documentation.",
        tools=MCPStreamableHTTPTool(
            name="Microsoft Learn MCP",
            url="https://learn.microsoft.com/api/mcp",
        ),
    )

    result = await agent.run("How do I create an Azure storage account using az cli?")
    print(result.text)
```
### Thread Management

Maintain conversation context across multiple interactions:

```python
async def thread_example():
    agent = OpenAIChatClient().create_agent(
        name="Agent",
        instructions="You are a helpful assistant.",
    )

    # Create a persistent thread for conversation context
    thread = agent.get_new_thread()

    # First interaction
    first_query = "My name is Alice"
    print(f"User: {first_query}")
    first_result = await agent.run(first_query, thread=thread)
    print(f"Agent: {first_result.text}")

    # Second interaction - agent remembers the context
    second_query = "What's my name?"
    print(f"User: {second_query}")
    second_result = await agent.run(second_query, thread=thread)
    print(f"Agent: {second_result.text}")  # Should remember "Alice"
```
230+
165231
### Streaming Responses
166232

167233
Get responses as they are generated for better user experience:
@@ -172,8 +238,8 @@ async def streaming_example():
172238
name="StoryTeller",
173239
instructions="You are a creative storyteller.",
174240
)
175-
176-
print("Assistant: ", end="", flush=True)
241+
242+
print("Agent: ", end="", flush=True)
177243
async for chunk in agent.run_stream("Tell me a short story about AI."):
178244
if chunk.text:
179245
print(chunk.text, end="", flush=True)

0 commit comments

Comments
 (0)