Merged
2 changes: 1 addition & 1 deletion Cargo.toml
@@ -17,7 +17,7 @@ default-tls = ["reqwest/default-tls", "tokio-tungstenite/native-tls"]
 [dependencies.reqwest]
 version = "0.12"
 default-features = false
-features = ["charset", "http2", "json", "multipart", "socks"]
+features = ["charset", "http2", "json", "multipart", "socks", "stream"]
 
 [dependencies.tokio]
 version = "1"
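Note: the "stream" feature added above is what gates reqwest's Response::bytes_stream(), which the new chat_completion_stream method in src/v1/api.rs relies on. A minimal sketch of consuming such a byte stream, illustrative only and not code from this PR (it assumes futures_util is available, as it is for the new example below):

use futures_util::StreamExt;

// Prints the size of each chunk as it arrives; errors surface per chunk
// as reqwest::Error rather than once for the whole request.
async fn print_chunks(resp: reqwest::Response) -> Result<(), reqwest::Error> {
    let mut stream = resp.bytes_stream();
    while let Some(chunk) = stream.next().await {
        let bytes = chunk?;
        println!("received {} bytes", bytes.len());
    }
    Ok(())
}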
3 changes: 2 additions & 1 deletion examples/chat_completion.rs
@@ -1,5 +1,6 @@
 use openai_api_rs::v1::api::OpenAIClient;
-use openai_api_rs::v1::chat_completion::{self, ChatCompletionRequest};
+use openai_api_rs::v1::chat_completion::chat_completion::ChatCompletionRequest;
+use openai_api_rs::v1::chat_completion::{self};
 use openai_api_rs::v1::common::GPT4_O_MINI;
 use std::env;
 
44 changes: 44 additions & 0 deletions examples/chat_completion_stream.rs
@@ -0,0 +1,44 @@
+use futures_util::StreamExt;
+use openai_api_rs::v1::api::OpenAIClient;
+use openai_api_rs::v1::chat_completion::chat_completion_stream::{
+    ChatCompletionStreamRequest, ChatCompletionStreamResponse,
+};
+use openai_api_rs::v1::chat_completion::{self};
+use openai_api_rs::v1::common::GPT4_O_MINI;
+use std::env;
+
+#[tokio::main]
+async fn main() -> Result<(), Box<dyn std::error::Error>> {
+    let api_key = env::var("OPENAI_API_KEY").unwrap().to_string();
+    let mut client = OpenAIClient::builder().with_api_key(api_key).build()?;
+
+    let req = ChatCompletionStreamRequest::new(
+        GPT4_O_MINI.to_string(),
+        vec![chat_completion::ChatCompletionMessage {
+            role: chat_completion::MessageRole::user,
+            content: chat_completion::Content::Text(String::from("What is bitcoin?")),
+            name: None,
+            tool_calls: None,
+            tool_call_id: None,
+        }],
+    );
+
+    let mut result = client.chat_completion_stream(req).await?;
+    while let Some(response) = result.next().await {
+        match response.clone() {
+            ChatCompletionStreamResponse::ToolCall(toolcalls) => {
+                println!("Tool Call: {:?}", toolcalls);
+            }
+            ChatCompletionStreamResponse::Content(content) => {
+                println!("Content: {:?}", content);
+            }
+            ChatCompletionStreamResponse::Done => {
+                println!("Done");
+            }
+        }
+    }
+
+    Ok(())
+}
+
+// OPENAI_API_KEY=xxxx cargo run --package openai-api-rs --example chat_completion_stream
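A possible variation on the example above, as a hypothetical helper that is not part of this PR: accumulate the streamed deltas into a single reply string instead of printing them. It assumes, as the println! output above suggests, that the Content variant carries the text delta as a String.

use futures_util::StreamExt;
use openai_api_rs::v1::api::OpenAIClient;
use openai_api_rs::v1::chat_completion::chat_completion_stream::{
    ChatCompletionStreamRequest, ChatCompletionStreamResponse,
};

// Drives the stream to completion and returns the concatenated content deltas.
async fn collect_reply(
    client: &mut OpenAIClient,
    req: ChatCompletionStreamRequest,
) -> Result<String, Box<dyn std::error::Error>> {
    let mut stream = client.chat_completion_stream(req).await?;
    let mut reply = String::new();
    while let Some(event) = stream.next().await {
        match event {
            ChatCompletionStreamResponse::Content(delta) => reply.push_str(&delta),
            ChatCompletionStreamResponse::ToolCall(_) => {} // tool calls ignored in this sketch
            ChatCompletionStreamResponse::Done => break,
        }
    }
    Ok(reply)
}

As in the example, StreamExt::next() is called directly on the value returned by chat_completion_stream, so no manual pinning is needed on the caller's side.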
29 changes: 17 additions & 12 deletions examples/function_call.rs
@@ -1,5 +1,10 @@
 use openai_api_rs::v1::api::OpenAIClient;
-use openai_api_rs::v1::chat_completion::{self, ChatCompletionRequest};
+use openai_api_rs::v1::chat_completion::{
+    chat_completion::ChatCompletionRequest, ChatCompletionMessage,
+};
+use openai_api_rs::v1::chat_completion::{
+    Content, FinishReason, MessageRole, Tool, ToolChoiceType, ToolType,
+};
 use openai_api_rs::v1::common::GPT4_O;
 use openai_api_rs::v1::types;
 use serde::{Deserialize, Serialize};
@@ -32,16 +37,16 @@ async fn main() -> Result<(), Box<dyn std::error::Error>> {
 
     let req = ChatCompletionRequest::new(
         GPT4_O.to_string(),
-        vec![chat_completion::ChatCompletionMessage {
-            role: chat_completion::MessageRole::user,
-            content: chat_completion::Content::Text(String::from("What is the price of Ethereum?")),
+        vec![ChatCompletionMessage {
+            role: MessageRole::user,
+            content: Content::Text(String::from("What is the price of Ethereum?")),
             name: None,
             tool_calls: None,
             tool_call_id: None,
         }],
     )
-    .tools(vec![chat_completion::Tool {
-        r#type: chat_completion::ToolType::Function,
+    .tools(vec![Tool {
+        r#type: ToolType::Function,
         function: types::Function {
             name: String::from("get_coin_price"),
             description: Some(String::from("Get the price of a cryptocurrency")),
@@ -52,7 +57,7 @@ async fn main() -> Result<(), Box<dyn std::error::Error>> {
             },
         },
     }])
-    .tool_choice(chat_completion::ToolChoiceType::Auto);
+    .tool_choice(ToolChoiceType::Auto);
 
     // debug request json
     // let serialized = serde_json::to_string(&req).unwrap();
@@ -65,14 +70,14 @@ async fn main() -> Result<(), Box<dyn std::error::Error>> {
             println!("No finish_reason");
             println!("{:?}", result.choices[0].message.content);
         }
-        Some(chat_completion::FinishReason::stop) => {
+        Some(FinishReason::stop) => {
             println!("Stop");
             println!("{:?}", result.choices[0].message.content);
         }
-        Some(chat_completion::FinishReason::length) => {
+        Some(FinishReason::length) => {
             println!("Length");
         }
-        Some(chat_completion::FinishReason::tool_calls) => {
+        Some(FinishReason::tool_calls) => {
             println!("ToolCalls");
             #[derive(Deserialize, Serialize)]
             struct Currency {
@@ -90,10 +95,10 @@ async fn main() -> Result<(), Box<dyn std::error::Error>> {
                 }
             }
         }
-        Some(chat_completion::FinishReason::content_filter) => {
+        Some(FinishReason::content_filter) => {
             println!("ContentFilter");
         }
-        Some(chat_completion::FinishReason::null) => {
+        Some(FinishReason::null) => {
             println!("Null");
         }
     }
3 changes: 2 additions & 1 deletion examples/function_call_role.rs
@@ -1,5 +1,6 @@
 use openai_api_rs::v1::api::OpenAIClient;
-use openai_api_rs::v1::chat_completion::{self, ChatCompletionRequest};
+use openai_api_rs::v1::chat_completion::chat_completion::ChatCompletionRequest;
+use openai_api_rs::v1::chat_completion::{self};
 use openai_api_rs::v1::common::GPT4_O;
 use openai_api_rs::v1::types;
 use serde::{Deserialize, Serialize};
3 changes: 2 additions & 1 deletion examples/openrouter.rs
@@ -1,5 +1,6 @@
 use openai_api_rs::v1::api::OpenAIClient;
-use openai_api_rs::v1::chat_completion::{self, ChatCompletionRequest};
+use openai_api_rs::v1::chat_completion::chat_completion::ChatCompletionRequest;
+use openai_api_rs::v1::chat_completion::{self};
 use openai_api_rs::v1::common::GPT4_O_MINI;
 use std::env;
 
5 changes: 2 additions & 3 deletions examples/openrouter_reasoning.rs
@@ -1,7 +1,6 @@
 use openai_api_rs::v1::api::OpenAIClient;
-use openai_api_rs::v1::chat_completion::{
-    self, ChatCompletionRequest, Reasoning, ReasoningEffort, ReasoningMode,
-};
+use openai_api_rs::v1::chat_completion::chat_completion::ChatCompletionRequest;
+use openai_api_rs::v1::chat_completion::{self, Reasoning, ReasoningEffort, ReasoningMode};
 use std::env;
 
 #[tokio::main]
3 changes: 2 additions & 1 deletion examples/vision.rs
@@ -1,5 +1,6 @@
 use openai_api_rs::v1::api::OpenAIClient;
-use openai_api_rs::v1::chat_completion::{self, ChatCompletionRequest};
+use openai_api_rs::v1::chat_completion::chat_completion::ChatCompletionRequest;
+use openai_api_rs::v1::chat_completion::{self};
 use openai_api_rs::v1::common::GPT4_O;
 use std::env;
 
42 changes: 40 additions & 2 deletions src/v1/api.rs
@@ -7,7 +7,10 @@ use crate::v1::audio::{
     AudioTranslationRequest, AudioTranslationResponse,
 };
 use crate::v1::batch::{BatchResponse, CreateBatchRequest, ListBatchResponse};
-use crate::v1::chat_completion::{ChatCompletionRequest, ChatCompletionResponse};
+use crate::v1::chat_completion::chat_completion::{ChatCompletionRequest, ChatCompletionResponse};
+use crate::v1::chat_completion::chat_completion_stream::{
+    ChatCompletionStream, ChatCompletionStreamRequest, ChatCompletionStreamResponse,
+};
 use crate::v1::common;
 use crate::v1::completion::{CompletionRequest, CompletionResponse};
 use crate::v1::edit::{EditRequest, EditResponse};
@@ -39,11 +42,12 @@ use crate::v1::run::{
 use crate::v1::thread::{CreateThreadRequest, ModifyThreadRequest, ThreadObject};
 
 use bytes::Bytes;
+use futures_util::Stream;
 use reqwest::header::{HeaderMap, HeaderName, HeaderValue};
 use reqwest::multipart::{Form, Part};
 use reqwest::{Client, Method, Response};
 use serde::Serialize;
-use serde_json::Value;
+use serde_json::{to_value, Value};
 use url::Url;
 
 use std::error::Error;
@@ -334,6 +338,40 @@ impl OpenAIClient {
         self.post("chat/completions", &req).await
     }
 
+    pub async fn chat_completion_stream(
+        &mut self,
+        req: ChatCompletionStreamRequest,
+    ) -> Result<impl Stream<Item = ChatCompletionStreamResponse>, APIError> {
+        let mut payload = to_value(&req).map_err(|err| APIError::CustomError {
+            message: format!("Failed to serialize request: {}", err),
+        })?;
+
+        if let Some(obj) = payload.as_object_mut() {
+            obj.insert("stream".into(), Value::Bool(true));
+        }
+
+        let request = self.build_request(Method::POST, "chat/completions").await;
+        let request = request.json(&payload);
+        let response = request.send().await?;
+
+        if response.status().is_success() {
+            Ok(ChatCompletionStream {
+                response: Box::pin(response.bytes_stream()),
+                buffer: String::new(),
+                first_chunk: true,
+            })
+        } else {
+            let error_text = response
+                .text()
+                .await
+                .unwrap_or_else(|_| String::from("Unknown error"));
+
+            Err(APIError::CustomError {
+                message: error_text,
+            })
+        }
+    }
+
     pub async fn audio_transcription(
         &mut self,
         req: AudioTranscriptionRequest,
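Note on the new method above: it serializes the request with serde_json::to_value, forces "stream": true into the payload, and wraps the raw reqwest byte stream in ChatCompletionStream together with a String buffer and a first_chunk flag. The actual event parsing lives in the chat_completion_stream module, whose diff is not shown in the hunks above. Purely as an illustration of what such a buffer is typically used for (a generic sketch, not the crate's implementation), an SSE-style "data: ..." stream can be drained like this:

// Callers push decoded bytes from bytes_stream() into `buffer`, then call this
// to pull out any complete "data:" payloads that have accumulated so far.
// Simplified: assumes events are separated by "\n\n" and ignores other SSE fields.
fn drain_sse_events(buffer: &mut String) -> Vec<String> {
    let mut events = Vec::new();
    while let Some(pos) = buffer.find("\n\n") {
        // Remove one complete event from the front of the buffer, keeping any
        // trailing partial event for the next call.
        let raw: String = buffer.drain(..pos + 2).collect();
        for line in raw.lines() {
            if let Some(data) = line.strip_prefix("data: ") {
                if data.trim() != "[DONE]" {
                    events.push(data.to_string());
                }
            }
        }
    }
    events
}

Each returned payload would then be deserialized and mapped to ChatCompletionStreamResponse::Content, ::ToolCall, or ::Done.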