CHANGELOG.md (5 additions, 0 deletions)
@@ -11,6 +11,11 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0

## Unreleased

+### Fixed
+
+- `opentelemetry-instrumentation-botocore`: Handle dict input in _decode_tool_use for Bedrock streaming
+  ([#3875](https://github.com/open-telemetry/opentelemetry-python-contrib/pull/3875))
+
## Version 1.38.0/0.59b0 (2025-10-16)

### Fixed
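As the new test further down exercises, a streamed `content_block_start` event can carry the `tool_use` input as an already-parsed dict. `json.loads` raises `TypeError` rather than `json.JSONDecodeError` when given a dict, so the `_decode_tool_use` helper removed in the next diff did not guard against that case. A minimal sketch of the failure mode, using an illustrative payload rather than a captured Bedrock response:

```python
import json

# Illustrative tool_use block whose "input" is already a dict, not a JSON string.
tool_use = {
    "type": "tool_use",
    "name": "get_weather",
    "input": {"location": "Seattle"},
}

try:
    # Mirrors the json.loads call inside the old _decode_tool_use helper.
    tool_use["input"] = json.loads(tool_use["input"])
except json.JSONDecodeError:
    pass  # the old helper only guarded against malformed JSON strings
except TypeError as exc:
    # json.loads() rejects non-string input, so a dict landed here,
    # outside the old except clause.
    print(f"not handled by the old code path: {exc}")
```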
opentelemetry/instrumentation/botocore/extensions/bedrock_utils.py
@@ -36,16 +36,6 @@
_StreamErrorCallableT = Callable[[Exception], None]


-def _decode_tool_use(tool_use):
-    # input get sent encoded in json
-    if "input" in tool_use:
-        try:
-            tool_use["input"] = json.loads(tool_use["input"])
-        except json.JSONDecodeError:
-            pass
-    return tool_use
-
-
# pylint: disable=abstract-method
class ConverseStreamWrapper(ObjectProxy):
"""Wrapper for botocore.eventstream.EventStream"""
@@ -368,10 +358,13 @@ def _process_anthropic_claude_chunk(self, chunk):
        if message_type == "content_block_stop":
            # {'type': 'content_block_stop', 'index': 0}
            if self._tool_json_input_buf:
-                self._content_block["input"] = self._tool_json_input_buf
-            self._message["content"].append(
-                _decode_tool_use(self._content_block)
-            )
+                try:
+                    self._content_block["input"] = json.loads(
+                        self._tool_json_input_buf
+                    )
+                except json.JSONDecodeError:
+                    self._content_block["input"] = self._tool_json_input_buf
+            self._message["content"].append(self._content_block)
            self._content_block = {}
            self._tool_json_input_buf = ""
            return
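The replacement logic above only attempts to JSON-decode the buffered partial-input string (`self._tool_json_input_buf`) and falls back to the raw buffer if decoding fails; an `input` dict that arrived whole in `content_block_start` leaves the buffer empty and is appended untouched. A standalone sketch of that behavior, with a hypothetical helper name and simplified blocks for illustration:

```python
import json


def finalize_tool_use_block(content_block: dict, json_input_buf: str) -> dict:
    """Hypothetical mirror of the new content_block_stop handling."""
    if json_input_buf:
        try:
            # Streamed input fragments were accumulated into a single string.
            content_block["input"] = json.loads(json_input_buf)
        except json.JSONDecodeError:
            # Keep the raw buffer if the accumulated fragments are not valid JSON.
            content_block["input"] = json_input_buf
    return content_block


# Case 1: tool input streamed as JSON fragments and buffered.
print(finalize_tool_use_block(
    {"type": "tool_use", "name": "get_weather"}, '{"location": "Seattle"}'
))
# {'type': 'tool_use', 'name': 'get_weather', 'input': {'location': 'Seattle'}}

# Case 2: tool input already arrived as a dict; the empty buffer means no
# json.loads call, so the dict passes through unchanged.
print(finalize_tool_use_block(
    {"type": "tool_use", "name": "get_weather", "input": {}}, ""
))
# {'type': 'tool_use', 'name': 'get_weather', 'input': {}}
```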
tests/test_botocore_bedrock.py
@@ -25,6 +25,9 @@
from botocore.eventstream import EventStream, EventStreamError
from botocore.response import StreamingBody

+from opentelemetry.instrumentation.botocore.extensions.bedrock_utils import (
+    InvokeModelWithResponseStreamWrapper,
+)
from opentelemetry.semconv._incubating.attributes.error_attributes import (
    ERROR_TYPE,
)
@@ -2975,6 +2978,79 @@ def test_invoke_model_with_response_stream_invalid_model(
    assert len(logs) == 0


+@pytest.mark.parametrize(
+    "input_value,expected_output",
+    [
+        ({"location": "Seattle"}, {"location": "Seattle"}),
+        ({}, {}),
+        (None, None),
+    ],
+)
+def test_anthropic_claude_chunk_tool_use_input_handling(
+    input_value, expected_output
+):
+    """Test that _process_anthropic_claude_chunk handles various tool_use input formats."""
+
+    def stream_done_callback(response, ended):
+        pass
+
+    def stream_error_callback(exc, ended):
+        pass
+
+    wrapper = InvokeModelWithResponseStreamWrapper(
+        stream=mock.MagicMock(),
+        stream_done_callback=stream_done_callback,
+        stream_error_callback=stream_error_callback,
+        model_id="anthropic.claude-3-5-sonnet-20240620-v1:0",
+    )
+
+    # Simulate message_start
+    wrapper._process_anthropic_claude_chunk(
+        {
+            "type": "message_start",
+            "message": {
+                "role": "assistant",
+                "content": [],
+            },
+        }
+    )
+
+    # Simulate content_block_start with specified input
+    content_block = {
+        "type": "tool_use",
+        "id": "test_id",
+        "name": "test_tool",
+    }
+    if input_value is not None:
+        content_block["input"] = input_value
+
+    wrapper._process_anthropic_claude_chunk(
+        {
+            "type": "content_block_start",
+            "index": 0,
+            "content_block": content_block,
+        }
+    )
+
+    # Simulate content_block_stop
+    wrapper._process_anthropic_claude_chunk(
+        {"type": "content_block_stop", "index": 0}
+    )
+
+    # Verify the message content
+    assert len(wrapper._message["content"]) == 1
+    tool_block = wrapper._message["content"][0]
+    assert tool_block["type"] == "tool_use"
+    assert tool_block["id"] == "test_id"
+    assert tool_block["name"] == "test_tool"
+
+    if expected_output is not None:
+        assert tool_block["input"] == expected_output
+        assert isinstance(tool_block["input"], dict)
+    else:
+        assert "input" not in tool_block
+
+
def amazon_nova_messages():
    return [
        {"role": "user", "content": [{"text": "Say this is a test"}]},