[Bug] Improper iterator closing in task delegation when using streaming mode - OpenTelemetry affected
harrytran001 opened this issue
Description
There are two issues here:
The first is with Agno task delegation: it breaks out of the member's Iterator/AsyncIterator without closing it properly.
The second is with openinference-instrumentation-agno. Even after I fixed the first issue, the member Agent's run appears at the same level as the team execution in the trace, instead of being nested under the delegate_task_to_member span, so the delegated task's response is not captured.
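For context on why the unclosed iterator breaks OpenTelemetry, here is a minimal standalone sketch (illustrative only: it uses plain contextvars instead of real OTel spans, and the names instrumented_stream/consumer are made up) of what happens when a consumer breaks out of an instrumented async generator:
import asyncio
import contextvars

current = contextvars.ContextVar("current", default=None)

async def instrumented_stream():
    # Stand-in for the instrumentation wrapper: it "attaches" a context
    # before yielding and resets it when the generator finishes.
    token = current.set("span-context")
    try:
        for i in range(5):
            yield i
    finally:
        # If the consumer breaks and the generator is only finalized later by
        # the event loop, this reset may run in a different Context and raise
        # ValueError -- the same failure OpenTelemetry logs as
        # "Failed to detach context".
        current.reset(token)

async def consumer():
    agen = instrumented_stream()
    async for item in agen:
        if item == 2:
            break  # leaves the generator suspended inside its try block
    # Closing the generator explicitly runs the finally block in the same
    # task/Context, so the reset succeeds and nothing is logged.
    await agen.aclose()

asyncio.run(consumer())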
Steps to Reproduce
- Enable Langfuse tracing via OpenTelemetry
- Run a team with task delegation
- Check the terminal logs and the Langfuse traces
Agent Configuration (if applicable)
import os
import asyncio
import base64
from uuid import uuid4
from dotenv import load_dotenv
from agno.agent import RunEvent
from agno.agent.agent import Agent
from agno.team import Team, TeamRunEvent
from agno.tools.hackernews import HackerNewsTools
from agno.tools.duckduckgo import DuckDuckGoTools
from agno.models.azure.openai_chat import AzureOpenAI
# Agno Langfuse setup
from openinference.instrumentation.agno import AgnoInstrumentor
from opentelemetry import trace as trace_api
from opentelemetry.sdk.trace import TracerProvider
from opentelemetry.sdk.trace.export import SimpleSpanProcessor
from opentelemetry.exporter.otlp.proto.http.trace_exporter import OTLPSpanExporter
langfuse_public_key = ""
langfuse_secret_key = ""
LANGFUSE_AUTH = base64.b64encode(
    f"{langfuse_public_key}:{langfuse_secret_key}".encode()
).decode()
os.environ["OTEL_EXPORTER_OTLP_ENDPOINT"] = (
    "https://us.cloud.langfuse.com/api/public/otel"
)
os.environ["OTEL_EXPORTER_OTLP_HEADERS"] = f"Authorization=Basic {LANGFUSE_AUTH}"
# Configure the tracer provider
tracer_provider = TracerProvider()
tracer_provider.add_span_processor(SimpleSpanProcessor(OTLPSpanExporter()))
trace_api.set_tracer_provider(tracer_provider=tracer_provider)
# Start instrumenting agno
AgnoInstrumentor().instrument()
load_dotenv()
wikipedia_agent = Agent(
    id="hacker-news-agent",
    name="Hacker News Agent",
    role="Search Hacker News for information",
    model=AzureOpenAI(id="gpt-4o-2024-11-20"),
    tools=[HackerNewsTools()],
    instructions=[
        "Find articles about the company in the Hacker News",
    ],
)
website_agent = Agent(
    id="website-agent",
    name="Website Agent",
    role="Search the website for information",
    model=AzureOpenAI(id="gpt-4o-2024-11-20"),
    tools=[DuckDuckGoTools()],
    instructions=[
        "Search the website for information",
    ],
)
user_id = str(uuid4())
id = str(uuid4())
company_info_team = Team(
    name="Company Info Team",
    id=id,
    user_id=user_id,
    model=AzureOpenAI(id="gpt-4o-2024-11-20"),
    members=[
        wikipedia_agent,
        website_agent,
    ],
    markdown=True,
    instructions=[
        "You are a team that finds information about a company.",
        "First search the web and wikipedia for information about the company.",
        "If you can find the company's website URL, then scrape the homepage and the about page.",
    ],
    show_members_responses=True,
)
async def run_team_with_events(prompt: str):
    content_started = False
    async for run_output_event in company_info_team.arun(
        prompt,
        stream=True,
        stream_events=True,
    ):
        if run_output_event.event in [
            TeamRunEvent.run_started,
            TeamRunEvent.run_completed,
        ]:
            print(f"\nTEAM EVENT: {run_output_event.event}")
        if run_output_event.event in [TeamRunEvent.tool_call_started]:
            print(f"\nTEAM EVENT: {run_output_event.event}")
            print(f"TOOL CALL: {run_output_event.tool.tool_name}")
            print(f"TOOL CALL ARGS: {run_output_event.tool.tool_args}")
        if run_output_event.event in [TeamRunEvent.tool_call_completed]:
            print(f"\nTEAM EVENT: {run_output_event.event}")
            print(f"TOOL CALL: {run_output_event.tool.tool_name}")
            print(f"TOOL CALL RESULT: {run_output_event.tool.result}")
        # Member events
        if run_output_event.event in [RunEvent.tool_call_started]:
            print(f"\nMEMBER EVENT: {run_output_event.event}")
            print(f"AGENT ID: {run_output_event.agent_id}")
            print(f"TOOL CALL: {run_output_event.tool.tool_name}")
            print(f"TOOL CALL ARGS: {run_output_event.tool.tool_args}")
        if run_output_event.event in [RunEvent.tool_call_completed]:
            print(f"\nMEMBER EVENT: {run_output_event.event}")
            print(f"AGENT ID: {run_output_event.agent_id}")
            print(f"TOOL CALL: {run_output_event.tool.tool_name}")
            print(f"TOOL CALL RESULT: {run_output_event.tool.result}")
        if run_output_event.event in [TeamRunEvent.run_content]:
            if not content_started:
                print("CONTENT")
                content_started = True
            else:
                print(run_output_event.content, end="")

if __name__ == "__main__":
    asyncio.run(
        run_team_with_events(
            "Write me a full report on everything you can find about Agno, the company building AI agent infrastructure.",
        )
    )
Expected Behavior
No errors should appear in the terminal, and the response of the task delegated to the member agent should be captured.
Actual Behavior
Errors were displayed in the terminal, and the response of the delegated task was not captured.
Screenshots or Logs (if applicable)
ERROR:opentelemetry.context:Failed to detach context
Traceback (most recent call last):
File "/Users/harry.tran/Library/Caches/pypoetry/virtualenvs/apac-gaia-reg-openai-wrapper-service-3kiMfCOe-py3.10/lib/python3.10/site-packages/opentelemetry/trace/__init__.py", line 589, in use_span
yield span
File "/Users/harry.tran/Library/Caches/pypoetry/virtualenvs/apac-gaia-reg-openai-wrapper-service-3kiMfCOe-py3.10/lib/python3.10/site-packages/openinference/instrumentation/_tracers.py", line 141, in start_as_current_span
yield cast(OpenInferenceSpan, current_span)
File "/Users/harry.tran/Library/Caches/pypoetry/virtualenvs/apac-gaia-reg-openai-wrapper-service-3kiMfCOe-py3.10/lib/python3.10/site-packages/openinference/instrumentation/agno/_wrappers.py", line 499, in arun_stream
yield response
GeneratorExit
During handling of the above exception, another exception occurred:
Traceback (most recent call last):
File "/Users/harry.tran/Library/Caches/pypoetry/virtualenvs/apac-gaia-reg-openai-wrapper-service-3kiMfCOe-py3.10/lib/python3.10/site-packages/opentelemetry/context/__init__.py", line 155, in detach
_RUNTIME_CONTEXT.detach(token)
File "/Users/harry.tran/Library/Caches/pypoetry/virtualenvs/apac-gaia-reg-openai-wrapper-service-3kiMfCOe-py3.10/lib/python3.10/site-packages/opentelemetry/context/contextvars_context.py", line 53, in detach
self._current_context.reset(token)
ValueError: <Token var=<ContextVar name='current_context' default={} at 0x105d3d030> at 0x12e8fda80> was created in a different Context
Environment
- agno: 2.2.8
- openinference-instrumentation-agno: 0.1.22
Possible Solutions (optional)
agno
In the delegate_task_to_member function in team.py, we should not break out of the loop when consuming the Iterator and AsyncIterator; otherwise the member's stream is never closed properly:
for member_agent_run_output_event in member_agent_run_response_stream:
    # If we get the final response, we can break out of the loop
    if isinstance(member_agent_run_output_event, TeamRunOutput) or isinstance(
        member_agent_run_output_event, RunOutput
    ):
        member_agent_run_response = member_agent_run_output_event  # type: ignore
        break  # To be removed
    # Check if the run is cancelled
    check_if_run_cancelled(member_agent_run_output_event)
    # Yield the member event directly
    member_agent_run_output_event.parent_run_id = (
        member_agent_run_output_event.parent_run_id or run_response.run_id
    )
    yield member_agent_run_output_event
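If the early break is kept for other reasons, another option (a sketch only, not Agno's actual code; variable names are taken from the snippet above, and the async variant would await aclose() instead) is to close the member stream explicitly so any tracing context managers inside it unwind in the same task:
try:
    for member_agent_run_output_event in member_agent_run_response_stream:
        if isinstance(member_agent_run_output_event, (TeamRunOutput, RunOutput)):
            member_agent_run_response = member_agent_run_output_event  # type: ignore
            break
        check_if_run_cancelled(member_agent_run_output_event)
        member_agent_run_output_event.parent_run_id = (
            member_agent_run_output_event.parent_run_id or run_response.run_id
        )
        yield member_agent_run_output_event
finally:
    # Generators expose close() (async generators expose aclose()); calling it
    # here runs their finally/__exit__ blocks immediately instead of leaving
    # the instrumentation span to be torn down later in a different Context.
    close = getattr(member_agent_run_response_stream, "close", None)
    if close is not None:
        close()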
openinference-instrumentation-agno
I don't have a solution for this one.
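One possible direction (purely a sketch, not the actual openinference code; the wrapper name and signature below are assumptions based on the traceback above) would be for the stream wrapper to avoid holding an attached context token across yields and to end the span in a finally block instead:
from opentelemetry import trace

tracer = trace.get_tracer(__name__)

async def arun_stream_wrapper(wrapped, *args, **kwargs):
    # Hypothetical replacement for the wrapper shown in the traceback.
    # start_span() (unlike start_as_current_span()) does not attach a
    # context token that must later be detached across task boundaries.
    span = tracer.start_span("Team.arun")
    try:
        async for response in wrapped(*args, **kwargs):
            yield response
    finally:
        # Runs both on normal completion and on GeneratorExit when the
        # consumer stops early, so the span is always ended cleanly.
        span.end()
If parent/child nesting is needed, the parent context could be passed explicitly via start_span(..., context=...) rather than relying on the ambient current context.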
Additional Context
No response

