chore: upgrade ureq to v3.0
decahedron1 committed Feb 14, 2025
1 parent e9367cc commit cae6ea8
Showing 5 changed files with 35 additions and 37 deletions.
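At a glance, the commit moves every HTTP call from the ureq 2.x builder/response API to the ureq 3.x Agent + Config API. The condensed sketch below is pieced together from the hunks that follow; the function name `sketch`, the `url` parameter, and the boxed error type are illustrative rather than taken from the diff.

// ureq 2.x (removed below): AgentBuilder, a per-request timeout,
// and `Response::into_reader()`:
//
//     let resp = ureq::AgentBuilder::new().try_proxy_from_env(true).build()
//         .get(url)
//         .timeout(std::time::Duration::from_secs(1800))
//         .call()?;
//     let mut reader = resp.into_reader();
//
// ureq 3.x (added below): a Config built once and handed to the Agent, headers
// exposed as `http` crate headers, and bodies read through `Body`:
fn sketch(url: &str) -> Result<Vec<u8>, Box<dyn std::error::Error>> {
    let agent = ureq::Agent::new_with_config(
        ureq::config::Config::builder()
            .proxy(ureq::Proxy::try_from_env())
            .timeout_global(Some(std::time::Duration::from_secs(1800)))
            .build(),
    );
    let resp = agent.get(url).call()?;
    let bytes = resp.into_body().into_with_config().limit(1_073_741_824).read_to_vec()?;
    Ok(bytes)
}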
Cargo.toml: 4 changes (2 additions & 2 deletions)
@@ -96,14 +96,14 @@ ndarray = { version = "0.16", default-features = false, optional = true }
ort-sys = { version = "=2.0.0-rc.9", path = "ort-sys", default-features = false }
libloading = { version = "0.8", optional = true }

ureq = { version = "2.1", optional = true, default-features = false, features = [ "tls" ] }
ureq = { version = "3", optional = true, default-features = false, features = [ "native-tls" ] }
sha2 = { version = "0.10", optional = true }
tracing = { version = "0.1", optional = true, default-features = false }
half = { version = "2.1", default-features = false, optional = true }

[dev-dependencies]
anyhow = "1.0"
ureq = "2.1"
ureq = { version = "3", default-features = false, features = [ "native-tls" ] }
image = "0.25"
test-log = { version = "0.2", default-features = false, features = [ "trace" ] }
tracing-subscriber = { version = "0.3", default-features = false, features = [ "env-filter", "fmt" ] }
ort-sys/Cargo.toml: 2 changes (1 addition & 1 deletion)
@@ -44,7 +44,7 @@ cann = []
qnn = []

[build-dependencies]
ureq = { version = "2.1", optional = true, default-features = false, features = [ "tls", "socks-proxy" ] }
ureq = { version = "3", optional = true, default-features = false, features = [ "native-tls", "socks-proxy" ] }
tar = { version = "0.4", optional = true }
flate2 = { version = "1.0", optional = true }
sha2 = { version = "0.10", optional = true }
ort-sys/build.rs: 43 changes (24 additions & 19 deletions)
@@ -27,25 +27,30 @@ use self::internal::dirs::cache_dir;

#[cfg(feature = "download-binaries")]
fn fetch_file(source_url: &str) -> Vec<u8> {
- let resp = ureq::AgentBuilder::new()
- .try_proxy_from_env(true)
- .build()
- .get(source_url)
- .timeout(std::time::Duration::from_secs(1800))
- .call()
- .unwrap_or_else(|err| panic!("Failed to GET `{source_url}`: {err}"));
-
- let len = resp
- .header("Content-Length")
- .and_then(|s| s.parse::<usize>().ok())
- .expect("Content-Length header should be present on archive response");
- let mut reader = resp.into_reader();
- let mut buffer = Vec::new();
- reader
- .read_to_end(&mut buffer)
- .unwrap_or_else(|err| panic!("Failed to download from `{source_url}`: {err}"));
- assert_eq!(buffer.len(), len);
- buffer
+ let resp = ureq::Agent::new_with_config(
+ ureq::config::Config::builder()
+ .proxy(ureq::Proxy::try_from_env())
+ .max_redirects(0)
+ .https_only(true)
+ .user_agent(format!(
+ "{}/{} (host {}; for {})",
+ env!("CARGO_PKG_NAME"),
+ env!("CARGO_PKG_VERSION"),
+ std::env::var("HOST").unwrap(),
+ std::env::var("TARGET").unwrap()
+ ))
+ .timeout_global(Some(std::time::Duration::from_secs(1800)))
+ .build()
+ )
+ .get(source_url)
+ .call()
+ .unwrap_or_else(|err| panic!("Failed to GET `{source_url}`: {err}"));
+
+ resp.into_body()
+ .into_with_config()
+ .limit(1_073_741_824)
+ .read_to_vec()
+ .unwrap_or_else(|err| panic!("Failed to download from `{source_url}`: {err}"))
}

#[cfg(feature = "download-binaries")]
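The 2.x fetch_file asserted that the number of bytes read matched the Content-Length header; the 3.x version drops that check and instead enforces a hard 1 GiB read limit. If the old length check were still wanted, it could be layered onto the new API roughly as sketched below; this is not part of the commit, and the helper name `fetch_checked` and the passed-in `agent` are assumptions.

fn fetch_checked(agent: &ureq::Agent, source_url: &str) -> Vec<u8> {
    let resp = agent
        .get(source_url)
        .call()
        .unwrap_or_else(|err| panic!("Failed to GET `{source_url}`: {err}"));

    // Content-Length is now an `http` crate header value, so it must be
    // converted to &str before it can be parsed.
    let expected: usize = resp
        .headers()
        .get("Content-Length")
        .and_then(|h| h.to_str().ok())
        .and_then(|s| s.parse().ok())
        .expect("Content-Length header should be present on archive response");

    let buffer = resp
        .into_body()
        .into_with_config()
        .limit(1_073_741_824)
        .read_to_vec()
        .unwrap_or_else(|err| panic!("Failed to download from `{source_url}`: {err}"));
    assert_eq!(buffer.len(), expected, "downloaded size should match Content-Length");
    buffer
}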
src/session/builder/impl_commit.rs: 6 changes (4 additions & 2 deletions)
@@ -50,12 +50,14 @@ impl SessionBuilder {
let resp = ureq::get(url).call().map_err(|e| Error::new(format!("Error downloading to file: {e}")))?;

let len = resp
.header("Content-Length")
.headers()
.get("Content-Length")
.and_then(|h| h.to_str().ok())
.and_then(|s| s.parse::<usize>().ok())
.expect("Missing Content-Length header");
crate::info!(len, "Downloading {} bytes", len);

- let mut reader = resp.into_reader();
+ let mut reader = resp.into_body().into_with_config().limit(u64::MAX).reader();
let temp_filepath = download_dir.join(format!("tmp_{}.{model_filename}", ort_sys::internal::random_identifier()));

let f = std::fs::File::create(&temp_filepath).expect("Failed to create model file");
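Here the builder keeps streaming the body to a temporary file, but under ureq 3 the headers and the body are accessed separately, and the reader is obtained through a body config with `limit(u64::MAX)`, presumably to lift the default body-size cap for large models. A minimal sketch of that streaming shape (the `download_to` helper and its error handling are illustrative; the actual copy step sits below the visible hunk):

use std::io::{self, BufWriter};

// Illustrative helper: stream a ureq 3 response body into a file.
fn download_to(url: &str, path: &std::path::Path) -> Result<u64, Box<dyn std::error::Error>> {
    let resp = ureq::get(url).call()?;
    // Take a reader over the body with the size cap lifted, as above.
    let mut reader = resp.into_body().into_with_config().limit(u64::MAX).reader();
    let file = std::fs::File::create(path)?;
    let mut writer = BufWriter::new(file);
    Ok(io::copy(&mut reader, &mut writer)?)
}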
tests/squeezenet.rs: 17 changes (4 additions & 13 deletions)
@@ -1,8 +1,7 @@
use std::{
fs,
io::{self, BufRead, BufReader},
- path::Path,
- time::Duration
+ path::Path
};

use image::{ImageBuffer, Pixel, Rgb, imageops::FilterType};
@@ -101,21 +100,13 @@ fn get_imagenet_labels() -> ort::Result<Vec<String>> {
if !labels_path.exists() {
let url = "https://s3.amazonaws.com/onnx-model-zoo/synset.txt";
println!("Downloading {:?} to {:?}...", url, labels_path);
- let resp = ureq::get(url)
- .timeout(Duration::from_secs(180)) // 3 minutes
- .call()
- .map_err(Error::wrap)?;
+ let resp = ureq::get(url).call().map_err(Error::wrap)?;

- assert!(resp.has("Content-Length"));
- let len = resp.header("Content-Length").and_then(|s| s.parse::<usize>().ok()).unwrap();
- println!("Downloading {} bytes...", len);

- let mut reader = resp.into_reader();
+ let mut reader = resp.into_body().into_reader();
let f = fs::File::create(&labels_path).unwrap();
let mut writer = io::BufWriter::new(f);

- let bytes_io_count = io::copy(&mut reader, &mut writer).unwrap();
- assert_eq!(bytes_io_count, len as u64);
+ io::copy(&mut reader, &mut writer).unwrap();
}

let file = BufReader::new(fs::File::open(labels_path).unwrap());
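For a small line-oriented file like synset.txt, the labels could also be read straight off the response without the temporary file, using only calls that appear elsewhere in this diff plus the standard library. A sketch of that alternative shape, not what the test does:

use std::io::{BufRead, BufReader};

fn fetch_labels(url: &str) -> Result<Vec<String>, Box<dyn std::error::Error>> {
    let resp = ureq::get(url).call()?;
    // Wrap the ureq 3 body reader in a BufReader and collect one label per line.
    let reader = BufReader::new(resp.into_body().into_reader());
    let labels = reader.lines().collect::<Result<Vec<_>, _>>()?;
    Ok(labels)
}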