Commit fa5f44e

Implement OpenAI Agents span processing (#3817)
1 parent: e3d3817

File tree

22 files changed (+1977, -15 lines)

Lines changed: 1 addition & 0 deletions
@@ -1,2 +1,3 @@
 examples/.env
 examples/openai_agents_multi_agent_travel/.env
+examples/**/.env

instrumentation-genai/opentelemetry-instrumentation-openai-agents/CHANGELOG.md

Lines changed: 3 additions & 0 deletions

@@ -9,3 +9,6 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0

 - Initial barebones package skeleton: minimal instrumentor stub, version module,
   and packaging metadata/entry point.
+  ([#3805](https://github.com/open-telemetry/opentelemetry-python-contrib/pull/3805))
+- Implement OpenAI Agents span processing aligned with GenAI semantic conventions.
+  ([#3817](https://github.com/open-telemetry/opentelemetry-python-contrib/pull/3817))
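
For context, "aligned with GenAI semantic conventions" refers to the
``gen_ai.*`` attribute namespace. A minimal sketch of how one might inspect
those attributes with an in-memory exporter (the exact attributes emitted per
span type may differ from this illustration)::

    from opentelemetry import trace
    from opentelemetry.sdk.trace import TracerProvider
    from opentelemetry.sdk.trace.export import SimpleSpanProcessor
    from opentelemetry.sdk.trace.export.in_memory_span_exporter import (
        InMemorySpanExporter,
    )

    exporter = InMemorySpanExporter()
    provider = TracerProvider()
    provider.add_span_processor(SimpleSpanProcessor(exporter))
    trace.set_tracer_provider(provider)

    # ... run an instrumented agent here ...

    for span in exporter.get_finished_spans():
        attrs = span.attributes or {}
        # GenAI semantic-convention attribute names; which ones are present
        # depends on the span kind (agent run, tool call, model generation).
        print(
            span.name,
            attrs.get("gen_ai.operation.name"),
            attrs.get("gen_ai.request.model"),
            attrs.get("gen_ai.usage.input_tokens"),
        )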
Lines changed: 11 additions & 0 deletions

@@ -0,0 +1,11 @@
# Update this with your real OpenAI API key
OPENAI_API_KEY=sk-YOUR_API_KEY

# Uncomment and adjust if you use a non-default OTLP collector endpoint
# OTEL_EXPORTER_OTLP_ENDPOINT=http://localhost:4317
# OTEL_EXPORTER_OTLP_PROTOCOL=grpc

OTEL_SERVICE_NAME=opentelemetry-python-openai-agents-manual

# Optionally override the agent name reported on spans
# OTEL_GENAI_AGENT_NAME=Travel Concierge
Lines changed: 42 additions & 0 deletions

@@ -0,0 +1,42 @@
OpenTelemetry OpenAI Agents Instrumentation Example
===================================================

This example demonstrates how to manually configure the OpenTelemetry SDK
alongside the OpenAI Agents instrumentation.

Running `main.py <main.py>`_ produces spans for the end-to-end agent run,
including tool invocations and model generations. Spans are exported through
OTLP/gRPC to the endpoint configured in the environment.

Setup
-----

1. Copy `.env.example <.env.example>`_ to ``.env`` and update it with your real
   ``OPENAI_API_KEY``. If your OTLP collector is not reachable via
   ``http://localhost:4317``, adjust the endpoint variables as needed.
2. Create a virtual environment and install the dependencies:

   ::

       python3 -m venv .venv
       source .venv/bin/activate
       pip install "python-dotenv[cli]"
       pip install -r requirements.txt

Run
---

Execute the sample with ``dotenv`` so the environment variables from ``.env``
are applied:

::

    dotenv run -- python main.py

The script automatically loads environment variables from ``.env``, so running
``python main.py`` directly also works if the shell already has the required
values exported.

You should see the agent response printed to the console while spans export to
your configured observability backend.
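
If no collector is listening yet, one quick sanity check (a sketch, not part of
this example) is to swap the OTLP exporter for a console exporter inside
``configure_otel`` so finished spans are printed to stdout::

    from opentelemetry import trace
    from opentelemetry.sdk.trace import TracerProvider
    from opentelemetry.sdk.trace.export import (
        ConsoleSpanExporter,
        SimpleSpanProcessor,
    )

    # Print finished spans to the terminal instead of sending them over
    # OTLP/gRPC; useful for verifying the pipeline before a collector exists.
    provider = TracerProvider()
    provider.add_span_processor(SimpleSpanProcessor(ConsoleSpanExporter()))
    trace.set_tracer_provider(provider)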
Lines changed: 65 additions & 0 deletions

@@ -0,0 +1,65 @@
# pylint: skip-file
"""Manual OpenAI Agents instrumentation example."""

from __future__ import annotations

from agents import Agent, Runner, function_tool
from dotenv import load_dotenv

from opentelemetry import trace
from opentelemetry.exporter.otlp.proto.grpc.trace_exporter import (
    OTLPSpanExporter,
)
from opentelemetry.instrumentation.openai_agents import (
    OpenAIAgentsInstrumentor,
)
from opentelemetry.sdk.trace import TracerProvider
from opentelemetry.sdk.trace.export import BatchSpanProcessor


def configure_otel() -> None:
    """Configure the OpenTelemetry SDK for exporting spans."""

    provider = TracerProvider()
    provider.add_span_processor(BatchSpanProcessor(OTLPSpanExporter()))
    trace.set_tracer_provider(provider)

    OpenAIAgentsInstrumentor().instrument(tracer_provider=provider)


@function_tool
def get_weather(city: str) -> str:
    """Return a canned weather response for the requested city."""

    return f"The forecast for {city} is sunny with pleasant temperatures."


def run_agent() -> None:
    """Create a simple agent and execute a single run."""

    assistant = Agent(
        name="Travel Concierge",
        instructions=(
            "You are a concise travel concierge. Use the weather tool when the"
            " traveler asks about local conditions."
        ),
        tools=[get_weather],
    )

    result = Runner.run_sync(
        assistant,
        "I'm visiting Barcelona this weekend. How should I pack?",
    )

    print("Agent response:")
    print(result.final_output)


def main() -> None:
    load_dotenv()
    configure_otel()
    run_agent()


if __name__ == "__main__":
    main()
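
Because ``BatchSpanProcessor`` exports on a background schedule, a very short
run can exit before the final batch is sent. A possible variant of ``main``
(a sketch that assumes ``configure_otel`` is changed to return the
``TracerProvider`` it creates) flushes spans explicitly on exit::

    def main() -> None:
        load_dotenv()
        provider = configure_otel()  # assumed to return the provider
        try:
            run_agent()
        finally:
            # shutdown() flushes the BatchSpanProcessor and stops its worker,
            # so the last spans reach the backend before the process exits.
            provider.shutdown()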
Lines changed: 6 additions & 0 deletions

@@ -0,0 +1,6 @@
openai-agents~=0.3.3
python-dotenv~=1.0

opentelemetry-sdk~=1.36.0
opentelemetry-exporter-otlp-proto-grpc~=1.36.0
opentelemetry-instrumentation-openai-agents~=0.1.0.dev
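
Note that ``opentelemetry-instrumentation-openai-agents~=0.1.0.dev`` is a
pre-release pin; if that version is not yet published, one option (an
assumption about the repository checkout layout, not part of this example) is
to install the package from source instead::

    pip install -e ./instrumentation-genai/opentelemetry-instrumentation-openai-agents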
Lines changed: 14 additions & 0 deletions

@@ -0,0 +1,14 @@
# Update this with your real OpenAI API key
OPENAI_API_KEY=sk-YOUR_API_KEY

# Uncomment and adjust if you use a non-default OTLP collector endpoint
# OTEL_EXPORTER_OTLP_ENDPOINT=http://localhost:4317
# OTEL_EXPORTER_OTLP_PROTOCOL=grpc

OTEL_SERVICE_NAME=opentelemetry-python-openai-agents-zero-code

# Enable auto-instrumentation for logs if desired
OTEL_PYTHON_LOGGING_AUTO_INSTRUMENTATION_ENABLED=true

# Optionally override the agent name reported on spans
# OTEL_GENAI_AGENT_NAME=Travel Concierge
Lines changed: 41 additions & 0 deletions

@@ -0,0 +1,41 @@
OpenTelemetry OpenAI Agents Zero-Code Instrumentation Example
=============================================================

This example shows how to capture telemetry from OpenAI Agents without
changing your application code by using ``opentelemetry-instrument``.

When `main.py <main.py>`_ is executed, spans describing the agent workflow are
exported to the configured OTLP endpoint. The spans include details such as the
operation name, tool usage, and token consumption (when available).

Setup
-----

1. Copy `.env.example <.env.example>`_ to ``.env`` and update it with your real
   ``OPENAI_API_KEY``. Adjust the OTLP endpoint settings if your collector is
   not reachable via ``http://localhost:4317``.
2. Create a virtual environment and install the dependencies:

   ::

       python3 -m venv .venv
       source .venv/bin/activate
       pip install "python-dotenv[cli]"
       pip install -r requirements.txt

Run
---

Execute the sample via ``opentelemetry-instrument`` so the OpenAI Agents
instrumentation is activated automatically:

::

    dotenv run -- opentelemetry-instrument python main.py

Because ``main.py`` invokes ``load_dotenv``, running ``python main.py`` directly
also works when the required environment variables are already exported.

You should see the agent response printed to the console while spans export to
your observability backend.
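
If no OTLP collector is available yet, one way to verify the zero-code setup
(a sketch using the standard exporter environment variable, not part of this
example) is to print spans to the console instead::

    OTEL_TRACES_EXPORTER=console dotenv run -- opentelemetry-instrument python main.py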
Lines changed: 66 additions & 0 deletions

@@ -0,0 +1,66 @@
"""Zero-code OpenAI Agents example."""

from __future__ import annotations

from agents import Agent, Runner, function_tool
from dotenv import load_dotenv

from opentelemetry import trace
from opentelemetry.exporter.otlp.proto.grpc.trace_exporter import (
    OTLPSpanExporter,
)
from opentelemetry.instrumentation.openai_agents import (
    OpenAIAgentsInstrumentor,
)
from opentelemetry.sdk.trace import TracerProvider
from opentelemetry.sdk.trace.export import BatchSpanProcessor


def configure_tracing() -> None:
    """Ensure tracing exports spans even without auto-instrumentation."""

    current_provider = trace.get_tracer_provider()
    if isinstance(current_provider, TracerProvider):
        provider = current_provider
    else:
        provider = TracerProvider()
        provider.add_span_processor(BatchSpanProcessor(OTLPSpanExporter()))
        trace.set_tracer_provider(provider)

    OpenAIAgentsInstrumentor().instrument(tracer_provider=provider)


@function_tool
def get_weather(city: str) -> str:
    """Return a canned weather response for the requested city."""

    return f"The forecast for {city} is sunny with pleasant temperatures."


def run_agent() -> None:
    assistant = Agent(
        name="Travel Concierge",
        instructions=(
            "You are a concise travel concierge. Use the weather tool when the"
            " traveler asks about local conditions."
        ),
        tools=[get_weather],
    )

    result = Runner.run_sync(
        assistant,
        "I'm visiting Barcelona this weekend. How should I pack?",
    )

    print("Agent response:")
    print(result.final_output)


def main() -> None:
    load_dotenv()
    configure_tracing()
    run_agent()


if __name__ == "__main__":
    main()
Lines changed: 7 additions & 0 deletions

@@ -0,0 +1,7 @@
openai-agents~=0.3.3
python-dotenv~=1.0

opentelemetry-sdk~=1.36.0
opentelemetry-exporter-otlp-proto-grpc~=1.36.0
opentelemetry-distro~=0.57b0
opentelemetry-instrumentation-openai-agents~=0.1.0.dev
