Rename metadata field to litellm_extra_body and add custom config support #837
base: main
Changes from all commits: 15a72e6, 52c964f, 07e08c5, b1d4ebc, c265c5f
```diff
@@ -67,8 +67,12 @@ def select_chat_options(
     out.pop("tools", None)
     out.pop("tool_choice", None)

+    # Pass through litellm_extra_body if provided
+    if llm.litellm_extra_body:
+        out["extra_body"] = llm.litellm_extra_body
```
Review comment on the added lines: We could try it, though I'm a bit concerned it might break: for example, if I set this, use litellm_proxy for a while, then switch the model to test direct Anthropic, the API would probably error on the unexpected parameter. Just a thought; I'm not sure it's necessary for the non-litellm_proxy case.
```diff
     # non litellm proxy special-case: keep `extra_body` off unless model requires it
-    if "litellm_proxy" not in llm.model:
+    # or user provided it
+    elif "litellm_proxy" not in llm.model:
         out.pop("extra_body", None)

     return out
```
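For reference, a minimal usage sketch of the new field. The constructor signature and field name are taken from the test below; the model name, usage_id, and payload values are illustrative.

```python
from openhands.sdk.llm import LLM

# Illustrative values: litellm_extra_body is forwarded verbatim to
# litellm.completion() as `extra_body`. Note that with the `elif` above it is
# passed through even for non-proxy models when explicitly set, which is the
# behavior the review comment questions.
llm = LLM(
    model="litellm_proxy/gpt-4o",  # hypothetical proxy-routed model name
    usage_id="example",            # illustrative usage id
    litellm_extra_body={
        "cluster_id": "prod-cluster-1",
        "user_tier": "premium",
    },
)
```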
@@ -0,0 +1,56 @@ (new test file)

```python
from unittest.mock import patch

from litellm.types.utils import ModelResponse

from openhands.sdk.llm import LLM, Message, TextContent


def test_litellm_extra_body_passed_to_completion():
    """Test that litellm_extra_body is correctly passed to litellm.completion()."""
    custom_extra_body = {
        "cluster_id": "prod-cluster-1",
        "routing_key": "high-priority",
        "user_tier": "premium",
        "custom_headers": {
            "X-Request-Source": "openhands-agent",
        },
    }

    llm = LLM(model="gpt-4o", usage_id="test", litellm_extra_body=custom_extra_body)
    messages = [Message(role="user", content=[TextContent(text="Hello")])]

    with patch("openhands.sdk.llm.llm.litellm_completion") as mock_completion:
        # Create a proper ModelResponse mock
        mock_response = ModelResponse(
            id="test-id",
            choices=[
                {
                    "index": 0,
                    "message": {"role": "assistant", "content": "Hello!"},
                    "finish_reason": "stop",
                }
            ],
            created=1234567890,
            model="gpt-4o",
            object="chat.completion",
        )
        mock_completion.return_value = mock_response

        # Call completion
        llm.completion(messages=messages)

        # Verify that litellm.completion was called with our extra_body
        mock_completion.assert_called_once()
        call_kwargs = mock_completion.call_args[1]

        # Check that extra_body was passed correctly
        assert "extra_body" in call_kwargs
        assert call_kwargs["extra_body"] == custom_extra_body

        # Verify specific custom fields were passed through
        assert call_kwargs["extra_body"]["cluster_id"] == "prod-cluster-1"
        assert call_kwargs["extra_body"]["routing_key"] == "high-priority"
        assert (
            call_kwargs["extra_body"]["custom_headers"]["X-Request-Source"]
            == "openhands-agent"
        )
```
This file was deleted.
Review comment: Just to note, I'm pretty sure this is correct. It's not really for the LLM itself; it's a field for additional logging / tracking, especially when these folks trace LLMs on the cloud.
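To illustrate that logging/tracking use case: assuming LiteLLM's usual handling, where `extra_body` keys are merged into the outgoing JSON request for OpenAI-compatible endpoints, a proxy or tracing backend might observe a request body roughly like the sketch below. The values are copied from the test above; the exact shape is an assumption about provider adapters, not something shown in this PR.

```python
# Rough, assumed shape of the request body a LiteLLM proxy or tracing backend
# could see once `extra_body` keys are merged in; illustrative only.
traced_request_body = {
    "model": "gpt-4o",
    "messages": [{"role": "user", "content": "Hello"}],
    # Fields originating from litellm_extra_body:
    "cluster_id": "prod-cluster-1",
    "routing_key": "high-priority",
    "user_tier": "premium",
    "custom_headers": {"X-Request-Source": "openhands-agent"},
}
```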