Skip to content

Commit 2fef3eb

Browse files
committed
Add spec for multiple tool calls in a single request
1 parent 9df281b commit 2fef3eb

File tree

3 files changed

+307
-2
lines changed

3 files changed

+307
-2
lines changed

README.md

Lines changed: 1 addition & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -682,8 +682,7 @@ response =
682682
message = response.dig("choices", 0, "message")
683683

684684
if message["role"] == "assistant" && message["tool_calls"]
685-
686-
# For a subsequent message with the role "tool", OpenAI requires the preceding message to have a tool_calls argument.
685+
# For a subsequent message with the role "tool", OpenAI requires the preceding message to have a single tool_calls argument.
687686
messages << message
688687

689688
message["tool_calls"].each do |tool_call|

spec/fixtures/cassettes/gpt-3_5-turbo_multiple_tool_calls_full_conversation.yml

Lines changed: 257 additions & 0 deletions
Some generated files are not rendered by default. Learn more about customizing how changed files appear on GitHub.

spec/openai/client/chat_spec.rb

Lines changed: 49 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -77,6 +77,55 @@
7777
end
7878
end
7979
end
context "with multiple tool calls" do
  # Cassette name mirrors the other chat specs: "<model> <description>".
  let(:cassette) { "#{model} multiple tool calls full conversation".downcase }
  # Two locations in one prompt so the model emits multiple tool calls.
  let(:messages) do
    [
      {
        "role" => "user",
        "content" => "What is the weather like in San Francisco and Japan?"
      }
    ]
  end
  let(:parameters) do
    {
      model: model,
      messages: messages,
      stream: stream,
      tools: tools,
      # "required" forces the model to call at least one tool.
      tool_choice: "required"
    }
  end

  it "handles full conversation with multiple tool calls" do
    VCR.use_cassette(cassette) do
      message = response.dig("choices", 0, "message")

      # NOTE(review): if the model returns no tool_calls, this guard makes
      # the example pass vacuously — consider asserting their presence.
      if message["role"] == "assistant" && message["tool_calls"]
        # OpenAI requires the assistant message that carries tool_calls to
        # precede the "tool"-role replies in the conversation history.
        messages << message

        # Answer every tool call; each reply is paired to its call via
        # tool_call_id. ([] over single-argument dig — Style/SingleArgumentDig)
        message["tool_calls"].each do |tool_call|
          messages << {
            tool_call_id: tool_call["id"],
            role: "tool",
            name: "get_current_weather",
            content: "The weather is nice 🌞"
          }
        end

        # Send the tool results back for the final assistant turn.
        second_response = OpenAI::Client.new.chat(
          parameters: {
            model: model,
            messages: messages
          }
        )

        expect(second_response["error"]).to be_nil
      end
    end
  end
end
80129
end
81130

82131
describe "streaming" do

0 commit comments

Comments (0)