Open
Description
Using inside fastapi async endpoint as:
@router.post(
"/experimental/chat-stream/v1",
dependencies=[Depends(validate_user_header)],
)
async def chat_stream(user_input):
result = Runner.run_streamed(deep_research_agent, user_input)
async def event_generator():
final_output = None # Store final output at generator level
async for event in result.stream_events():
print(event)
if event.type == "raw_response_event" and isinstance(event.data, ResponseTextDeltaEvent):
print(event.data.delta, end="", flush=True)
yield f"data: {json.dumps({'type': 'raw_response', 'content': event.data.delta})}\n\n"
continue
if event.type == "raw_response_event":
if hasattr(event.data, "type"):
if event.data.type == "response.output_text.delta":
event_data = {
"type": "agent_response",
"content": getattr(event.data, "delta", ""),
"agent_id": getattr(event.data, "item_id", "unknown")
}
yield f"data: {json.dumps(event_data)}\n\n"
elif event.data.type == "response.output_text.done":
event_data = {
"type": "agent_response_complete",
"text": getattr(event.data, "text", ""),
"agent_id": getattr(event.data, "item_id", "unknown")
}
yield f"data: {json.dumps(event_data)}\n\n"
elif "tool" in getattr(event.data, "type", ""):
tool_data = {
"type": "tool_usage",
"tool_name": getattr(event.data, "name", None),
"input": getattr(event.data, "input", None),
"output": getattr(event.data, "output", None)
}
yield f"data: {json.dumps(tool_data)}\n\n"
elif event.type == "run_item_stream_event":
agent_name = getattr(event.item.agent, "name", None) if hasattr(event.item, "agent") else None
agent_data = {
"type": "agent_info",
"name": getattr(event, "name", "unknown"),
"agent": agent_name
}
yield f"data: {json.dumps(agent_data)}\n\n"
# Capture final output if available during streaming
if hasattr(result, "final_output"):
final_output = result.final_output
# Yield final output after stream completes
if final_output is not None:
yield f"data: {json.dumps({'type': 'complete', 'final_output': final_output})}\n\n"
return StreamingResponse(
event_generator(),
media_type="text/event-stream"
)
gives bug as :
lr-kn-python-api | AgentUpdatedStreamEvent(new_agent=Agent(name='Deep Research Agent', instructions='\nYou are a deep research agent.\nYour job is to analyse the user query take a pause and then analyse all the Agents and tools that you have at your disposal and then make a plan to answer the user query.\nYou will then use the agents and tools to answer the user query.\n', handoff_description=None, handoffs=[Agent(name='Rrf Agent', instructions="You are a Rrf agent. You will help the user search the web and latest results when you don't have the information in your knowledge base.", handoff_description=None, handoffs=[], model=None, model_settings=ModelSettings(temperature=None, top_p=None, frequency_penalty=None, presence_penalty=None, tool_choice=None, parallel_tool_calls=False, truncation=None, max_tokens=None), tools=[FunctionTool(name='rrf_reranking_retrieve_generate_stream_tool', description='Tool wrapper for DeepResearchChain.invoke.', params_json_schema={'properties': {'topic': {'title': 'Topic', 'type': 'string'}}, 'required': ['topic'], 'title': 'rrf_reranking_retrieve_generate_stream_tool_args', 'type': 'object', 'additionalProperties': False}, on_invoke_tool=<function function_tool.<locals>._create_function_tool.<locals>._on_invoke_tool at 0x7ff78dc80e00>, strict_json_schema=True)], mcp_servers=[], input_guardrails=[], output_guardrails=[], output_type=None, hooks=None, tool_use_behavior='run_llm_again', reset_tool_choice=True), Agent(name='Web Search Agent', instructions="You are a web search agent. You will help the user search the web and latest results when you don't have the information in your knowledge base.", handoff_description=None, handoffs=[], model=None, model_settings=ModelSettings(temperature=None, top_p=None, frequency_penalty=None, presence_penalty=None, tool_choice=None, parallel_tool_calls=False, truncation=None, max_tokens=None), tools=['web_search'], mcp_servers=[], input_guardrails=[], output_guardrails=[], output_type=None, hooks=None, tool_use_behavior='run_llm_again', reset_tool_choice=True)], model=None, model_settings=ModelSettings(temperature=None, top_p=None, frequency_penalty=None, presence_penalty=None, tool_choice=None, parallel_tool_calls=False, truncation=None, max_tokens=None), tools=[], mcp_servers=[], input_guardrails=[], output_guardrails=[], output_type=None, hooks=None, tool_use_behavior='run_llm_again', reset_tool_choice=True), type='agent_updated_stream_event')
lr-kn-python-api | 2025-04-30 12:40:06,852 - INFO - logger.py - dispatch:11 - POST /v1/experimental/chat-stream/v1
lr-kn-python-api | 2025-04-30 12:40:06,855 - INFO - httptools_impl.py - send:505 - 192.168.65.1:62927 - "POST /v1/experimental/chat-stream/v1?model_name=gpt-4o&agent=FullSourceScan HTTP/1.1" 200
lr-kn-python-api | 2025-04-30 12:40:07,324 - INFO - _client.py - _send_single_request:1786 - HTTP Request: POST https://api.openai.com/v1/responses "HTTP/1.1 200 OK"
lr-kn-python-api | RawResponsesStreamEvent(data=ResponseCreatedEvent(response=Response(id='resp_68121a281b008191a433c6cd72a1982609365e36e448609b', created_at=1746016808.0, error=None, incomplete_details=None, instructions='\nYou are a deep research agent.\nYour job is to analyse the user query take a pause and then analyse all the Agents and tools that you have at your disposal and then make a plan to answer the user query.\nYou will then use the agents and tools to answer the user query.\n', metadata={}, model='gpt-4o-2024-08-06', object='response', output=[], parallel_tool_calls=False, temperature=1.0, tool_choice='auto', tools=[FunctionTool(name='transfer_to_rrf_agent', parameters={'additionalProperties': False, 'type': 'object', 'properties': {}, 'required': []}, strict=True, type='function', description='Handoff to the Rrf Agent agent to handle the request. '), FunctionTool(name='transfer_to_web_search_agent', parameters={'additionalProperties': False, 'type': 'object', 'properties': {}, 'required': []}, strict=True, type='function', description='Handoff to the Web Search Agent agent to handle the request. ')], top_p=1.0, max_output_tokens=None, previous_response_id=None, reasoning=Reasoning(effort=None, generate_summary=None, summary=None), service_tier='auto', status='in_progress', text=ResponseTextConfig(format=ResponseFormatText(type='text')), truncation='disabled', usage=None, user=None, store=True), type='response.created'), type='raw_response_event')
lr-kn-python-api | RawResponsesStreamEvent(data=ResponseInProgressEvent(response=Response(id='resp_68121a281b008191a433c6cd72a1982609365e36e448609b', created_at=1746016808.0, error=None, incomplete_details=None, instructions='\nYou are a deep research agent.\nYour job is to analyse the user query take a pause and then analyse all the Agents and tools that you have at your disposal and then make a plan to answer the user query.\nYou will then use the agents and tools to answer the user query.\n', metadata={}, model='gpt-4o-2024-08-06', object='response', output=[], parallel_tool_calls=False, temperature=1.0, tool_choice='auto', tools=[FunctionTool(name='transfer_to_rrf_agent', parameters={'additionalProperties': False, 'type': 'object', 'properties': {}, 'required': []}, strict=True, type='function', description='Handoff to the Rrf Agent agent to handle the request. '), FunctionTool(name='transfer_to_web_search_agent', parameters={'additionalProperties': False, 'type': 'object', 'properties': {}, 'required': []}, strict=True, type='function', description='Handoff to the Web Search Agent agent to handle the request. ')], top_p=1.0, max_output_tokens=None, previous_response_id=None, reasoning=Reasoning(effort=None, generate_summary=None, summary=None), service_tier='auto', status='in_progress', text=ResponseTextConfig(format=ResponseFormatText(type='text')), truncation='disabled', usage=None, user=None, store=True), type='response.in_progress'), type='raw_response_event')
lr-kn-python-api | RawResponsesStreamEvent(data=ResponseOutputItemAddedEvent(item=ResponseOutputMessage(id='msg_68121a288ad881918e0099c6ef9e946809365e36e448609b', content=[], role='assistant', status='in_progress', type='message'), output_index=0, type='response.output_item.added'), type='raw_response_event')
lr-kn-python-api | RawResponsesStreamEvent(data=ResponseContentPartAddedEvent(content_index=0, item_id='msg_68121a288ad881918e0099c6ef9e946809365e36e448609b', output_index=0, part=ResponseOutputText(annotations=[], text='', type='output_text'), type='response.content_part.added'), type='raw_response_event')
lr-kn-python-api | RawResponsesStreamEvent(data=ResponseTextDeltaEvent(content_index=0, delta='It', item_id='msg_68121a288ad881918e0099c6ef9e946809365e36e448609b', output_index=0, type='response.output_text.delta'), type='raw_response_event')
lr-kn-python-api | ItRawResponsesStreamEvent(data=ResponseTextDeltaEvent(content_index=0, delta=' looks', item_id='msg_68121a288ad881918e0099c6ef9e946809365e36e448609b', output_index=0, type='response.output_text.delta'), type='raw_response_event')
lr-kn-python-api | looksRawResponsesStreamEvent(data=ResponseTextDeltaEvent(content_index=0, delta=' like', item_id='msg_68121a288ad881918e0099c6ef9e946809365e36e448609b', output_index=0, type='response.output_text.delta'), type='raw_response_event')
lr-kn-python-api | likeRawResponsesStreamEvent(data=ResponseTextDeltaEvent(content_index=0, delta=' there', item_id='msg_68121a288ad881918e0099c6ef9e946809365e36e448609b', output_index=0, type='response.output_text.delta'), type='raw_response_event')
lr-kn-python-api | thereRawResponsesStreamEvent(data=ResponseTextDeltaEvent(content_index=0, delta=' might', item_id='msg_68121a288ad881918e0099c6ef9e946809365e36e448609b', output_index=0, type='response.output_text.delta'), type='raw_response_event')
lr-kn-python-api | mightRawResponsesStreamEvent(data=ResponseTextDeltaEvent(content_index=0, delta=' be', item_id='msg_68121a288ad881918e0099c6ef9e946809365e36e448609b', output_index=0, type='response.output_text.delta'), type='raw_response_event')
lr-kn-python-api | beRawResponsesStreamEvent(data=ResponseTextDeltaEvent(content_index=0, delta=' a', item_id='msg_68121a288ad881918e0099c6ef9e946809365e36e448609b', output_index=0, type='response.output_text.delta'), type='raw_response_event')
lr-kn-python-api | aRawResponsesStreamEvent(data=ResponseTextDeltaEvent(content_index=0, delta=' typo', item_id='msg_68121a288ad881918e0099c6ef9e946809365e36e448609b', output_index=0, type='response.output_text.delta'), type='raw_response_event')
lr-kn-python-api | typoRawResponsesStreamEvent(data=ResponseTextDeltaEvent(content_index=0, delta='.', item_id='msg_68121a288ad881918e0099c6ef9e946809365e36e448609b', output_index=0, type='response.output_text.delta'), type='raw_response_event')
lr-kn-python-api | .RawResponsesStreamEvent(data=ResponseTextDeltaEvent(content_index=0, delta=' If', item_id='msg_68121a288ad881918e0099c6ef9e946809365e36e448609b', output_index=0, type='response.output_text.delta'), type='raw_response_event')
lr-kn-python-api | IfRawResponsesStreamEvent(data=ResponseTextDeltaEvent(content_index=0, delta=' you', item_id='msg_68121a288ad881918e0099c6ef9e946809365e36e448609b', output_index=0, type='response.output_text.delta'), type='raw_response_event')
lr-kn-python-api | youRawResponsesStreamEvent(data=ResponseTextDeltaEvent(content_index=0, delta=' meant', item_id='msg_68121a288ad881918e0099c6ef9e946809365e36e448609b', output_index=0, type='response.output_text.delta'), type='raw_response_event')
lr-kn-python-api | meantRawResponsesStreamEvent(data=ResponseTextDeltaEvent(content_index=0, delta=' "', item_id='msg_68121a288ad881918e0099c6ef9e946809365e36e448609b', output_index=0, type='response.output_text.delta'), type='raw_response_event')
lr-kn-python-api | "RawResponsesStreamEvent(data=ResponseTextDeltaEvent(content_index=0, delta='how', item_id='msg_68121a288ad881918e0099c6ef9e946809365e36e448609b', output_index=0, type='response.output_text.delta'), type='raw_response_event')
lr-kn-python-api | howRawResponsesStreamEvent(data=ResponseTextDeltaEvent(content_index=0, delta="'s", item_id='msg_68121a288ad881918e0099c6ef9e946809365e36e448609b', output_index=0, type='response.output_text.delta'), type='raw_response_event')
lr-kn-python-api | 'sRawResponsesStreamEvent(data=ResponseTextDeltaEvent(content_index=0, delta=' it', item_id='msg_68121a288ad881918e0099c6ef9e946809365e36e448609b', output_index=0, type='response.output_text.delta'), type='raw_response_event')
lr-kn-python-api | itRawResponsesStreamEvent(data=ResponseTextDeltaEvent(content_index=0, delta=' going', item_id='msg_68121a288ad881918e0099c6ef9e946809365e36e448609b', output_index=0, type='response.output_text.delta'), type='raw_response_event')
lr-kn-python-api | goingRawResponsesStreamEvent(data=ResponseTextDeltaEvent(content_index=0, delta=',"', item_id='msg_68121a288ad881918e0099c6ef9e946809365e36e448609b', output_index=0, type='response.output_text.delta'), type='raw_response_event')
lr-kn-python-api | ,"RawResponsesStreamEvent(data=ResponseTextDeltaEvent(content_index=0, delta=' things', item_id='msg_68121a288ad881918e0099c6ef9e946809365e36e448609b', output_index=0, type='response.output_text.delta'), type='raw_response_event')
lr-kn-python-api | thingsRawResponsesStreamEvent(data=ResponseTextDeltaEvent(content_index=0, delta=' are', item_id='msg_68121a288ad881918e0099c6ef9e946809365e36e448609b', output_index=0, type='response.output_text.delta'), type='raw_response_event')
lr-kn-python-api | areRawResponsesStreamEvent(data=ResponseTextDeltaEvent(content_index=0, delta=' good', item_id='msg_68121a288ad881918e0099c6ef9e946809365e36e448609b', output_index=0, type='response.output_text.delta'), type='raw_response_event')
lr-kn-python-api | goodRawResponsesStreamEvent(data=ResponseTextDeltaEvent(content_index=0, delta=' on', item_id='msg_68121a288ad881918e0099c6ef9e946809365e36e448609b', output_index=0, type='response.output_text.delta'), type='raw_response_event')
lr-kn-python-api | onRawResponsesStreamEvent(data=ResponseTextDeltaEvent(content_index=0, delta=' my', item_id='msg_68121a288ad881918e0099c6ef9e946809365e36e448609b', output_index=0, type='response.output_text.delta'), type='raw_response_event')
lr-kn-python-api | myRawResponsesStreamEvent(data=ResponseTextDeltaEvent(content_index=0, delta=' end', item_id='msg_68121a288ad881918e0099c6ef9e946809365e36e448609b', output_index=0, type='response.output_text.delta'), type='raw_response_event')
lr-kn-python-api | endRawResponsesStreamEvent(data=ResponseTextDeltaEvent(content_index=0, delta='.', item_id='msg_68121a288ad881918e0099c6ef9e946809365e36e448609b', output_index=0, type='response.output_text.delta'), type='raw_response_event')
lr-kn-python-api | .RawResponsesStreamEvent(data=ResponseTextDeltaEvent(content_index=0, delta=' How', item_id='msg_68121a288ad881918e0099c6ef9e946809365e36e448609b', output_index=0, type='response.output_text.delta'), type='raw_response_event')
lr-kn-python-api | HowRawResponsesStreamEvent(data=ResponseTextDeltaEvent(content_index=0, delta=' can', item_id='msg_68121a288ad881918e0099c6ef9e946809365e36e448609b', output_index=0, type='response.output_text.delta'), type='raw_response_event')
lr-kn-python-api | canRawResponsesStreamEvent(data=ResponseTextDeltaEvent(content_index=0, delta=' I', item_id='msg_68121a288ad881918e0099c6ef9e946809365e36e448609b', output_index=0, type='response.output_text.delta'), type='raw_response_event')
lr-kn-python-api | IRawResponsesStreamEvent(data=ResponseTextDeltaEvent(content_index=0, delta=' assist', item_id='msg_68121a288ad881918e0099c6ef9e946809365e36e448609b', output_index=0, type='response.output_text.delta'), type='raw_response_event')
lr-kn-python-api | assistRawResponsesStreamEvent(data=ResponseTextDeltaEvent(content_index=0, delta=' you', item_id='msg_68121a288ad881918e0099c6ef9e946809365e36e448609b', output_index=0, type='response.output_text.delta'), type='raw_response_event')
lr-kn-python-api | youRawResponsesStreamEvent(data=ResponseTextDeltaEvent(content_index=0, delta=' today', item_id='msg_68121a288ad881918e0099c6ef9e946809365e36e448609b', output_index=0, type='response.output_text.delta'), type='raw_response_event')
lr-kn-python-api | todayRawResponsesStreamEvent(data=ResponseTextDeltaEvent(content_index=0, delta='?', item_id='msg_68121a288ad881918e0099c6ef9e946809365e36e448609b', output_index=0, type='response.output_text.delta'), type='raw_response_event')
lr-kn-python-api | ?RawResponsesStreamEvent(data=ResponseTextDoneEvent(content_index=0, item_id='msg_68121a288ad881918e0099c6ef9e946809365e36e448609b', output_index=0, text='It looks like there might be a typo. If you meant "how\'s it going," things are good on my end. How can I assist you today?', type='response.output_text.done'), type='raw_response_event')
lr-kn-python-api | RawResponsesStreamEvent(data=ResponseContentPartDoneEvent(content_index=0, item_id='msg_68121a288ad881918e0099c6ef9e946809365e36e448609b', output_index=0, part=ResponseOutputText(annotations=[], text='It looks like there might be a typo. If you meant "how\'s it going," things are good on my end. How can I assist you today?', type='output_text'), type='response.content_part.done'), type='raw_response_event')
lr-kn-python-api | RawResponsesStreamEvent(data=ResponseOutputItemDoneEvent(item=ResponseOutputMessage(id='msg_68121a288ad881918e0099c6ef9e946809365e36e448609b', content=[ResponseOutputText(annotations=[], text='It looks like there might be a typo. If you meant "how\'s it going," things are good on my end. How can I assist you today?', type='output_text')], role='assistant', status='completed', type='message'), output_index=0, type='response.output_item.done'), type='raw_response_event')
lr-kn-python-api | RawResponsesStreamEvent(data=ResponseCompletedEvent(response=Response(id='resp_68121a281b008191a433c6cd72a1982609365e36e448609b', created_at=1746016808.0, error=None, incomplete_details=None, instructions='\nYou are a deep research agent.\nYour job is to analyse the user query take a pause and then analyse all the Agents and tools that you have at your disposal and then make a plan to answer the user query.\nYou will then use the agents and tools to answer the user query.\n', metadata={}, model='gpt-4o-2024-08-06', object='response', output=[ResponseOutputMessage(id='msg_68121a288ad881918e0099c6ef9e946809365e36e448609b', content=[ResponseOutputText(annotations=[], text='It looks like there might be a typo. If you meant "how\'s it going," things are good on my end. How can I assist you today?', type='output_text')], role='assistant', status='completed', type='message')], parallel_tool_calls=False, temperature=1.0, tool_choice='auto', tools=[FunctionTool(name='transfer_to_rrf_agent', parameters={'additionalProperties': False, 'type': 'object', 'properties': {}, 'required': []}, strict=True, type='function', description='Handoff to the Rrf Agent agent to handle the request. '), FunctionTool(name='transfer_to_web_search_agent', parameters={'additionalProperties': False, 'type': 'object', 'properties': {}, 'required': []}, strict=True, type='function', description='Handoff to the Web Search Agent agent to handle the request. ')], top_p=1.0, max_output_tokens=None, previous_response_id=None, reasoning=Reasoning(effort=None, generate_summary=None, summary=None), service_tier='default', status='completed', text=ResponseTextConfig(format=ResponseFormatText(type='text')), truncation='disabled', usage=ResponseUsage(input_tokens=134, input_tokens_details=InputTokensDetails(cached_tokens=0), output_tokens=34, output_tokens_details=OutputTokensDetails(reasoning_tokens=0), total_tokens=168), user=None, store=True), type='response.completed'), type='raw_response_event')
lr-kn-python-api | RunItemStreamEvent(name='message_output_created', item=MessageOutputItem(agent=Agent(name='Deep Research Agent', instructions='\nYou are a deep research agent.\nYour job is to analyse the user query take a pause and then analyse all the Agents and tools that you have at your disposal and then make a plan to answer the user query.\nYou will then use the agents and tools to answer the user query.\n', handoff_description=None, handoffs=[Agent(name='Rrf Agent', instructions="You are a Rrf agent. You will help the user search the web and latest results when you don't have the information in your knowledge base.", handoff_description=None, handoffs=[], model=None, model_settings=ModelSettings(temperature=None, top_p=None, frequency_penalty=None, presence_penalty=None, tool_choice=None, parallel_tool_calls=False, truncation=None, max_tokens=None), tools=[FunctionTool(name='rrf_reranking_retrieve_generate_stream_tool', description='Tool wrapper for DeepResearchChain.invoke.', params_json_schema={'properties': {'topic': {'title': 'Topic', 'type': 'string'}}, 'required': ['topic'], 'title': 'rrf_reranking_retrieve_generate_stream_tool_args', 'type': 'object', 'additionalProperties': False}, on_invoke_tool=<function function_tool.<locals>._create_function_tool.<locals>._on_invoke_tool at 0x7ff78dc80e00>, strict_json_schema=True)], mcp_servers=[], input_guardrails=[], output_guardrails=[], output_type=None, hooks=None, tool_use_behavior='run_llm_again', reset_tool_choice=True), Agent(name='Web Search Agent', instructions="You are a web search agent. You will help the user search the web and latest results when you don't have the information in your knowledge base.", handoff_description=None, handoffs=[], model=None, model_settings=ModelSettings(temperature=None, top_p=None, frequency_penalty=None, presence_penalty=None, tool_choice=None, parallel_tool_calls=False, truncation=None, max_tokens=None), tools=['web_search'], mcp_servers=[], input_guardrails=[], output_guardrails=[], output_type=None, hooks=None, tool_use_behavior='run_llm_again', reset_tool_choice=True)], model=None, model_settings=ModelSettings(temperature=None, top_p=None, frequency_penalty=None, presence_penalty=None, tool_choice=None, parallel_tool_calls=False, truncation=None, max_tokens=None), tools=[], mcp_servers=[], input_guardrails=[], output_guardrails=[], output_type=None, hooks=None, tool_use_behavior='run_llm_again', reset_tool_choice=True), raw_item=ResponseOutputMessage(id='msg_68121a288ad881918e0099c6ef9e946809365e36e448609b', content=[ResponseOutputText(annotations=[], text='It looks like there might be a typo. If you meant "how\'s it going," things are good on my end. How can I assist you today?', type='output_text')], role='assistant', status='completed', type='message'), type='message_output_item'), type='run_item_stream_event')
lr-kn-python-api | 2025-04-30 12:40:08,606 - ERROR - httptools_impl.py - run_asgi:440 - Exception in ASGI application
lr-kn-python-api | + Exception Group Traceback (most recent call last):
lr-kn-python-api | | File "/usr/local/lib/python3.11/site-packages/starlette/_utils.py", line 87, in collapse_excgroups
lr-kn-python-api | | yield
lr-kn-python-api | | File "/usr/local/lib/python3.11/site-packages/starlette/middleware/base.py", line 190, in __call__
lr-kn-python-api | | async with anyio.create_task_group() as task_group:
lr-kn-python-api | | File "/usr/local/lib/python3.11/site-packages/anyio/_backends/_asyncio.py", line 772, in __aexit__
lr-kn-python-api | | raise BaseExceptionGroup(
lr-kn-python-api | | ExceptionGroup: unhandled errors in a TaskGroup (1 sub-exception)
lr-kn-python-api | +-+---------------- 1 ----------------
lr-kn-python-api | | Exception Group Traceback (most recent call last):
lr-kn-python-api | | File "/usr/local/lib/python3.11/site-packages/starlette/middleware/base.py", line 192, in __call__
lr-kn-python-api | | await response(scope, wrapped_receive, send)
lr-kn-python-api | | File "/usr/local/lib/python3.11/site-packages/starlette/responses.py", line 258, in __call__
lr-kn-python-api | | async with anyio.create_task_group() as task_group:
lr-kn-python-api | | File "/usr/local/lib/python3.11/site-packages/anyio/_backends/_asyncio.py", line 772, in __aexit__
lr-kn-python-api | | raise BaseExceptionGroup(
lr-kn-python-api | | ExceptionGroup: unhandled errors in a TaskGroup (1 sub-exception)
lr-kn-python-api | +-+---------------- 1 ----------------
lr-kn-python-api | | Exception Group Traceback (most recent call last):
lr-kn-python-api | | File "/usr/local/lib/python3.11/site-packages/starlette/responses.py", line 261, in wrap
lr-kn-python-api | | await func()
lr-kn-python-api | | File "/usr/local/lib/python3.11/site-packages/starlette/middleware/base.py", line 217, in stream_response
lr-kn-python-api | | return await super().stream_response(send)
lr-kn-python-api | | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
lr-kn-python-api | | File "/usr/local/lib/python3.11/site-packages/starlette/responses.py", line 250, in stream_response
lr-kn-python-api | | async for chunk in self.body_iterator:
lr-kn-python-api | | File "/usr/local/lib/python3.11/site-packages/starlette/middleware/base.py", line 181, in body_stream
lr-kn-python-api | | raise app_exc
lr-kn-python-api | | File "/usr/local/lib/python3.11/site-packages/starlette/middleware/base.py", line 151, in coro
lr-kn-python-api | | await self.app(scope, receive_or_disconnect, send_no_error)
lr-kn-python-api | | File "/usr/local/lib/python3.11/site-packages/starlette/middleware/cors.py", line 93, in __call__
lr-kn-python-api | | await self.simple_response(scope, receive, send, request_headers=headers)
lr-kn-python-api | | File "/usr/local/lib/python3.11/site-packages/starlette/middleware/cors.py", line 148, in simple_response
lr-kn-python-api | | await self.app(scope, receive, send)
lr-kn-python-api | | File "/usr/local/lib/python3.11/site-packages/starlette/middleware/exceptions.py", line 65, in __call__
lr-kn-python-api | | await wrap_app_handling_exceptions(self.app, conn)(scope, receive, send)
lr-kn-python-api | | File "/usr/local/lib/python3.11/site-packages/starlette/_exception_handler.py", line 64, in wrapped_app
lr-kn-python-api | | raise exc
lr-kn-python-api | | File "/usr/local/lib/python3.11/site-packages/starlette/_exception_handler.py", line 53, in wrapped_app
lr-kn-python-api | | await app(scope, receive, sender)
lr-kn-python-api | | File "/usr/local/lib/python3.11/site-packages/starlette/routing.py", line 756, in __call__
lr-kn-python-api | | await self.middleware_stack(scope, receive, send)
lr-kn-python-api | | File "/usr/local/lib/python3.11/site-packages/starlette/routing.py", line 776, in app
lr-kn-python-api | | await route.handle(scope, receive, send)
lr-kn-python-api | | File "/usr/local/lib/python3.11/site-packages/starlette/routing.py", line 297, in handle
lr-kn-python-api | | await self.app(scope, receive, send)
lr-kn-python-api | | File "/usr/local/lib/python3.11/site-packages/starlette/routing.py", line 77, in app
lr-kn-python-api | | await wrap_app_handling_exceptions(app, request)(scope, receive, send)
lr-kn-python-api | | File "/usr/local/lib/python3.11/site-packages/starlette/_exception_handler.py", line 64, in wrapped_app
lr-kn-python-api | | raise exc
lr-kn-python-api | | File "/usr/local/lib/python3.11/site-packages/starlette/_exception_handler.py", line 53, in wrapped_app
lr-kn-python-api | | await app(scope, receive, sender)
lr-kn-python-api | | File "/usr/local/lib/python3.11/site-packages/starlette/routing.py", line 75, in app
lr-kn-python-api | | await response(scope, receive, send)
lr-kn-python-api | | File "/usr/local/lib/python3.11/site-packages/starlette/responses.py", line 258, in __call__
lr-kn-python-api | | async with anyio.create_task_group() as task_group:
lr-kn-python-api | | File "/usr/local/lib/python3.11/site-packages/anyio/_backends/_asyncio.py", line 772, in __aexit__
lr-kn-python-api | | raise BaseExceptionGroup(
lr-kn-python-api | | ExceptionGroup: unhandled errors in a TaskGroup (1 sub-exception)
lr-kn-python-api | +-+---------------- 1 ----------------
lr-kn-python-api | | Traceback (most recent call last):
lr-kn-python-api | | File "/usr/local/lib/python3.11/site-packages/uvicorn/protocols/http/httptools_impl.py", line 435, in run_asgi
lr-kn-python-api | | result = await app( # type: ignore[func-returns-value]
lr-kn-python-api | | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
lr-kn-python-api | | File "/usr/local/lib/python3.11/site-packages/uvicorn/middleware/proxy_headers.py", line 78, in __call__
lr-kn-python-api | | return await self.app(scope, receive, send)
lr-kn-python-api | | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
lr-kn-python-api | | File "/usr/local/lib/python3.11/site-packages/fastapi/applications.py", line 1054, in __call__
lr-kn-python-api | | await super().__call__(scope, receive, send)
lr-kn-python-api | | File "/usr/local/lib/python3.11/site-packages/starlette/applications.py", line 123, in __call__
lr-kn-python-api | | await self.middleware_stack(scope, receive, send)
lr-kn-python-api | | File "/usr/local/lib/python3.11/site-packages/starlette/middleware/errors.py", line 186, in __call__
lr-kn-python-api | | raise exc
lr-kn-python-api | | File "/usr/local/lib/python3.11/site-packages/starlette/middleware/errors.py", line 164, in __call__
lr-kn-python-api | | await self.app(scope, receive, _send)
lr-kn-python-api | | File "/usr/local/lib/python3.11/site-packages/starlette/middleware/base.py", line 189, in __call__
lr-kn-python-api | | with collapse_excgroups():
lr-kn-python-api | | File "/usr/local/lib/python3.11/contextlib.py", line 158, in __exit__
lr-kn-python-api | | self.gen.throw(typ, value, traceback)
lr-kn-python-api | | File "/usr/local/lib/python3.11/site-packages/starlette/_utils.py", line 93, in collapse_excgroups
lr-kn-python-api | | raise exc
lr-kn-python-api | | File "/usr/local/lib/python3.11/site-packages/starlette/responses.py", line 261, in wrap
lr-kn-python-api | | await func()
lr-kn-python-api | | File "/usr/local/lib/python3.11/site-packages/starlette/responses.py", line 250, in stream_response
lr-kn-python-api | | async for chunk in self.body_iterator:
lr-kn-python-api | | File "/workspace/src/routes/chat_stream_route.py", line 65, in event_generator
lr-kn-python-api | | async for event in result.stream_events():
lr-kn-python-api | | File "/usr/local/lib/python3.11/site-packages/agents/result.py", line 181, in stream_events
lr-kn-python-api | | self._trace.finish(reset_current=True)
lr-kn-python-api | | File "/usr/local/lib/python3.11/site-packages/agents/tracing/traces.py", line 173, in finish
lr-kn-python-api | | Scope.reset_current_trace(self._prev_context_token)
lr-kn-python-api | | File "/usr/local/lib/python3.11/site-packages/agents/tracing/scope.py", line 45, in reset_current_trace
lr-kn-python-api | | _current_trace.reset(token)
lr-kn-python-api | | ValueError: <Token var=<ContextVar name='current_trace' default=None at 0x7ff78e90cea0> at 0x7ff78c437b80> was created in a different Context
lr-kn-python-api | +------------------------------------
lr-kn-python-api |
lr-kn-python-api | During handling of the above exception, another exception occurred:
lr-kn-python-api |
lr-kn-python-api | Traceback (most recent call last):
lr-kn-python-api | File "/usr/local/lib/python3.11/site-packages/uvicorn/protocols/http/httptools_impl.py", line 435, in run_asgi
lr-kn-python-api | result = await app( # type: ignore[func-returns-value]
lr-kn-python-api | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
lr-kn-python-api | File "/usr/local/lib/python3.11/site-packages/uvicorn/middleware/proxy_headers.py", line 78, in __call__
lr-kn-python-api | return await self.app(scope, receive, send)
lr-kn-python-api | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
lr-kn-python-api | File "/usr/local/lib/python3.11/site-packages/fastapi/applications.py", line 1054, in __call__
lr-kn-python-api | await super().__call__(scope, receive, send)
lr-kn-python-api | File "/usr/local/lib/python3.11/site-packages/starlette/applications.py", line 123, in __call__
lr-kn-python-api | await self.middleware_stack(scope, receive, send)
lr-kn-python-api | File "/usr/local/lib/python3.11/site-packages/starlette/middleware/errors.py", line 186, in __call__
lr-kn-python-api | raise exc
lr-kn-python-api | File "/usr/local/lib/python3.11/site-packages/starlette/middleware/errors.py", line 164, in __call__
lr-kn-python-api | await self.app(scope, receive, _send)
lr-kn-python-api | File "/usr/local/lib/python3.11/site-packages/starlette/middleware/base.py", line 189, in __call__
lr-kn-python-api | with collapse_excgroups():
lr-kn-python-api | File "/usr/local/lib/python3.11/contextlib.py", line 158, in __exit__
lr-kn-python-api | self.gen.throw(typ, value, traceback)
lr-kn-python-api | File "/usr/local/lib/python3.11/site-packages/starlette/_utils.py", line 93, in collapse_excgroups
lr-kn-python-api | raise exc
lr-kn-python-api | File "/usr/local/lib/python3.11/site-packages/starlette/responses.py", line 261, in wrap
lr-kn-python-api | await func()
lr-kn-python-api | File "/usr/local/lib/python3.11/site-packages/starlette/responses.py", line 250, in stream_response
lr-kn-python-api | async for chunk in self.body_iterator:
lr-kn-python-api | File "/workspace/src/routes/chat_stream_route.py", line 65, in event_generator
lr-kn-python-api | async for event in result.stream_events():
lr-kn-python-api | File "/usr/local/lib/python3.11/site-packages/agents/result.py", line 181, in stream_events
lr-kn-python-api | self._trace.finish(reset_current=True)
lr-kn-python-api | File "/usr/local/lib/python3.11/site-packages/agents/tracing/traces.py", line 173, in finish
lr-kn-python-api | Scope.reset_current_trace(self._prev_context_token)
lr-kn-python-api | File "/usr/local/lib/python3.11/site-packages/agents/tracing/scope.py", line 45, in reset_current_trace
lr-kn-python-api | _current_trace.reset(token)
lr-kn-python-api | ValueError: <Token var=<ContextVar name='current_trace' default=None at 0x7ff78e90cea0> at 0x7ff78c437b80> was created in a different Context
lr-kn-py
and agent is :
deep_research_agent = Agent(
name="Deep Research Agent",
instructions=DEEP_RESEARCH_PROMPT,
# model=LitellmModel(),
handoffs=[web_search_agent],
# tools=[rrf_reranking_retrieve_generate_stream_tool, web_search_agent],
)
Debug information
- Agents SDK version: (e.g.
v0.13
) - Python version (e.g. Python 3.13 +)
Expected behavior
A clear and concise description of what you expected to happen.
Metadata
Metadata
Assignees
Labels
Something isn't workingSomething isn't working