Skip to content

Commit 82ea63e

Browse files
committed
refactor: migrate _convert_message_to_dict to convert_to_openai_messages
1 parent ce0b1a9 commit 82ea63e

File tree

1 file changed

+15
-63
lines changed
  • libs/partners/openai/langchain_openai/chat_models

1 file changed

+15
-63
lines changed

libs/partners/openai/langchain_openai/chat_models/base.py

Lines changed: 15 additions & 63 deletions
Original file line number | Diff line number | Diff line change
@@ -62,6 +62,7 @@
6262
ToolMessage,
6363
ToolMessageChunk,
6464
convert_to_openai_data_block,
65+
convert_to_openai_messages,
6566
is_data_content_block,
6667
)
6768
from langchain_core.messages.ai import (
@@ -231,6 +232,11 @@ def _format_message_content(content: Any) -> Any:
231232
return formatted_content
232233

233234

235+
@deprecated(
236+
since="0.3.60",
237+
alternative="langchain_core.messages.convert_to_openai_messages",
238+
pending=True,
239+
)
234240
def _convert_message_to_dict(message: BaseMessage) -> dict:
235241
"""Convert a LangChain message to a dictionary.
236242
@@ -240,66 +246,12 @@ def _convert_message_to_dict(message: BaseMessage) -> dict:
240246
Returns:
241247
The dictionary.
242248
"""
243-
message_dict: dict[str, Any] = {"content": _format_message_content(message.content)}
244-
if (name := message.name or message.additional_kwargs.get("name")) is not None:
245-
message_dict["name"] = name
246-
247-
# populate role and additional message data
248-
if isinstance(message, ChatMessage):
249-
message_dict["role"] = message.role
250-
elif isinstance(message, HumanMessage):
251-
message_dict["role"] = "user"
252-
elif isinstance(message, AIMessage):
253-
message_dict["role"] = "assistant"
254-
if message.tool_calls or message.invalid_tool_calls:
255-
message_dict["tool_calls"] = [
256-
_lc_tool_call_to_openai_tool_call(tc) for tc in message.tool_calls
257-
] + [
258-
_lc_invalid_tool_call_to_openai_tool_call(tc)
259-
for tc in message.invalid_tool_calls
260-
]
261-
elif "tool_calls" in message.additional_kwargs:
262-
message_dict["tool_calls"] = message.additional_kwargs["tool_calls"]
263-
tool_call_supported_props = {"id", "type", "function"}
264-
message_dict["tool_calls"] = [
265-
{k: v for k, v in tool_call.items() if k in tool_call_supported_props}
266-
for tool_call in message_dict["tool_calls"]
267-
]
268-
elif "function_call" in message.additional_kwargs:
269-
# OpenAI raises 400 if both function_call and tool_calls are present in the
270-
# same message.
271-
message_dict["function_call"] = message.additional_kwargs["function_call"]
272-
else:
273-
pass
274-
# If tool calls present, content null value should be None not empty string.
275-
if "function_call" in message_dict or "tool_calls" in message_dict:
276-
message_dict["content"] = message_dict["content"] or None
277-
278-
if "audio" in message.additional_kwargs:
279-
# openai doesn't support passing the data back - only the id
280-
# https://platform.openai.com/docs/guides/audio/multi-turn-conversations
281-
raw_audio = message.additional_kwargs["audio"]
282-
audio = (
283-
{"id": message.additional_kwargs["audio"]["id"]}
284-
if "id" in raw_audio
285-
else raw_audio
286-
)
287-
message_dict["audio"] = audio
288-
elif isinstance(message, SystemMessage):
289-
message_dict["role"] = message.additional_kwargs.get(
290-
"__openai_role__", "system"
291-
)
292-
elif isinstance(message, FunctionMessage):
293-
message_dict["role"] = "function"
294-
elif isinstance(message, ToolMessage):
295-
message_dict["role"] = "tool"
296-
message_dict["tool_call_id"] = message.tool_call_id
297-
298-
supported_props = {"content", "role", "tool_call_id"}
299-
message_dict = {k: v for k, v in message_dict.items() if k in supported_props}
300-
else:
301-
raise TypeError(f"Got unknown type {message}")
302-
return message_dict
249+
oai_message = convert_to_openai_messages(message)
250+
# The linter does not know that `convert_to_openai_messages` will only return one
251+
# message here.
252+
if isinstance(oai_message, list):
253+
return oai_message[0]
254+
return oai_message
303255

304256

305257
def _convert_delta_to_message_chunk(
@@ -982,7 +934,7 @@ def _get_request_payload(
982934
if self._use_responses_api(payload):
983935
payload = _construct_responses_api_payload(messages, payload)
984936
else:
985-
payload["messages"] = [_convert_message_to_dict(m) for m in messages]
937+
payload["messages"] = convert_to_openai_messages(messages)
986938
return payload
987939

988940
def _create_chat_result(
@@ -1276,7 +1228,7 @@ def get_num_tokens_from_messages(
12761228
" for information on how messages are converted to tokens."
12771229
)
12781230
num_tokens = 0
1279-
messages_dict = [_convert_message_to_dict(m) for m in messages]
1231+
messages_dict = convert_to_openai_messages(messages)
12801232
for message in messages_dict:
12811233
num_tokens += tokens_per_message
12821234
for key, value in message.items():
@@ -3104,7 +3056,7 @@ def _pop_summary_index_from_reasoning(reasoning: dict) -> dict:
31043056
def _construct_responses_api_input(messages: Sequence[BaseMessage]) -> list:
31053057
input_ = []
31063058
for lc_msg in messages:
3107-
msg = _convert_message_to_dict(lc_msg)
3059+
msg = convert_to_openai_messages(lc_msg)
31083060
# "name" parameter unsupported
31093061
if "name" in msg:
31103062
msg.pop("name")

0 commit comments

Comments (0)