@@ -97,6 +97,7 @@ Import the required classes from the Agent Framework:

 ```python
 import asyncio
+from agent_framework import ChatAgent
 from agent_framework.openai import OpenAIResponsesClient
 ```

@@ -147,7 +148,7 @@ async def streaming_example():
         instructions="You are a creative storyteller.",
     )

-    print("Assistant: ", end="", flush=True)
+    print("Agent: ", end="", flush=True)
     async for chunk in agent.run_stream("Tell me a short story about AI."):
         if chunk.text:
             print(chunk.text, end="", flush=True)
@@ -172,7 +173,7 @@ async def reasoning_example():
         reasoning={"effort": "high", "summary": "detailed"},
     )

-    print("Assistant: ", end="", flush=True)
+    print("Agent: ", end="", flush=True)
     async for chunk in agent.run_stream("Solve: 3x + 11 = 14"):
         if chunk.contents:
             for content in chunk.contents:
@@ -270,68 +271,89 @@ async def code_interpreter_example():
 For data analysis tasks, you can upload files and analyze them with code:

 ```python
+import os
 import tempfile
+from agent_framework import HostedCodeInterpreterTool
+from openai import AsyncOpenAI

 async def code_interpreter_with_files_example():
-    client = OpenAIResponsesClient()
+    print("=== OpenAI Code Interpreter with File Upload ===")
+
+    # Create the OpenAI client for file operations
+    openai_client = AsyncOpenAI()

     # Create sample CSV data
     csv_data = """name,department,salary,years_experience
 Alice Johnson,Engineering,95000,5
 Bob Smith,Sales,75000,3
 Carol Williams,Engineering,105000,8
+David Brown,Marketing,68000,2
+Emma Davis,Sales,82000,4
+Frank Wilson,Engineering,88000,6
 """

-    # Upload file for analysis
+    # Create temporary CSV file
     with tempfile.NamedTemporaryFile(mode="w", suffix=".csv", delete=False) as temp_file:
         temp_file.write(csv_data)
         temp_file_path = temp_file.name

+    # Upload file to OpenAI
+    print("Uploading file to OpenAI...")
     with open(temp_file_path, "rb") as file:
-        uploaded_file = await client.client.files.create(
+        uploaded_file = await openai_client.files.create(
             file=file,
-            purpose="assistants",
+            purpose="assistants",  # Required for code interpreter
         )

-    agent = client.create_agent(
-        name="DataAnalyst",
-        instructions="You are a data analyst that can write and execute Python code.",
-        tools=HostedCodeInterpreterTool(file_ids=[uploaded_file.id]),
+    print(f"File uploaded with ID: {uploaded_file.id}")
+
+    # Create agent using OpenAI Responses client
+    agent = ChatAgent(
+        chat_client=OpenAIResponsesClient(),
+        instructions="You are a helpful assistant that can analyze data files using Python code.",
+        tools=HostedCodeInterpreterTool(inputs=[{"file_id": uploaded_file.id}]),
     )

-    result = await agent.run("Analyze the salary data and create a summary by department.")
-    print(result.text)
+    # Test the code interpreter with the uploaded file
+    query = "Analyze the employee data in the uploaded CSV file. Calculate average salary by department."
+    print(f"User: {query}")
+    result = await agent.run(query)
+    print(f"Agent: {result.text}")

-    # Cleanup
-    await client.client.files.delete(uploaded_file.id)
+    # Clean up: delete the uploaded file
+    await openai_client.files.delete(uploaded_file.id)
+    print(f"Cleaned up uploaded file: {uploaded_file.id}")
+
+    # Clean up temporary local file
+    os.unlink(temp_file_path)
+    print(f"Cleaned up temporary file: {temp_file_path}")
 ```

 ### Thread Management

 Maintain conversation context across multiple interactions:

 ```python
-from agent_framework import AgentThread
-
 async def thread_example():
-    async with OpenAIResponsesClient().create_agent(
-        name="Assistant",
+    agent = OpenAIResponsesClient().create_agent(
+        name="Agent",
         instructions="You are a helpful assistant.",
-    ) as agent:
-        # Create a persistent thread for conversation context
-        thread = agent.get_new_thread()
-
-        # First interaction
-        first_query = "My name is Alice"
-        print(f"User: {first_query}")
-        first_result = await agent.run(first_query, thread=thread)
-        print(f"Agent: {first_result.text}")
-
-        # Second interaction - agent remembers the context
-        second_query = "What's my name?"
-        print(f"User: {second_query}")
-        second_result = await agent.run(second_query, thread=thread)
-        print(f"Agent: {second_result.text}")  # Should remember "Alice"
+    )
+
+    # Create a persistent thread for conversation context
+    thread = agent.get_new_thread()
+
+    # First interaction
+    first_query = "My name is Alice"
+    print(f"User: {first_query}")
+    first_result = await agent.run(first_query, thread=thread)
+    print(f"Agent: {first_result.text}")

+    # Second interaction - agent remembers the context
+    second_query = "What's my name?"
+    print(f"User: {second_query}")
+    second_result = await agent.run(second_query, thread=thread)
+    print(f"Agent: {second_result.text}")  # Should remember "Alice"
 ```

 ### File Search
@@ -344,30 +366,44 @@ from agent_framework import HostedFileSearchTool, HostedVectorStoreContent
 async def file_search_example():
     client = OpenAIResponsesClient()

-    # Create a vector store with documents
+    # Create a file with sample content
     file = await client.client.files.create(
-        file=("knowledge.txt", b"The weather today is sunny with a high of 75F."),
+        file=("todays_weather.txt", b"The weather today is sunny with a high of 75F."),
         purpose="user_data"
     )
+
+    # Create a vector store for document storage
     vector_store = await client.client.vector_stores.create(
         name="knowledge_base",
         expires_after={"anchor": "last_active_at", "days": 1},
     )
-    await client.client.vector_stores.files.create_and_poll(
+
+    # Add file to vector store and wait for processing
+    result = await client.client.vector_stores.files.create_and_poll(
         vector_store_id=vector_store.id,
         file_id=file.id
     )
-
-    agent = client.create_agent(
-        name="KnowledgeBot",
-        instructions="You are a helpful assistant that can search through documents.",
-        tools=HostedFileSearchTool(
-            vector_stores=[HostedVectorStoreContent(vector_store_id=vector_store.id)]
-        ),
+
+    # Check if processing was successful
+    if result.last_error is not None:
+        raise Exception(f"Vector store file processing failed with status: {result.last_error.message}")
+
+    # Create vector store content reference
+    vector_store_content = HostedVectorStoreContent(vector_store_id=vector_store.id)
+
+    # Create agent with file search capability
+    agent = ChatAgent(
+        chat_client=client,
+        instructions="You are a helpful assistant that can search through files to find information.",
+        tools=[HostedFileSearchTool(inputs=vector_store_content)],
     )

-    result = await agent.run("What does the document say about weather?")
-    print(result.text)
+    # Test the file search
+    message = "What is the weather today? Do a file search to find the answer."
+    print(f"User: {message}")
+
+    response = await agent.run(message)
+    print(f"Agent: {response}")

     # Cleanup
     await client.client.vector_stores.delete(vector_store.id)
0 commit comments