Skip to content
Open
Show file tree
Hide file tree
Changes from 40 commits
Commits
Show all changes
44 commits
Select commit Hold shift + click to select a range
c2ecece
feat: add auth_store for UploadOpts and update get_auth_store
magentaqin Jul 31, 2025
7796ddd
fix: fix linting
magentaqin Jul 31, 2025
d05406c
feat: add regex package
magentaqin Aug 1, 2025
a490759
feat: check server type by host
magentaqin Aug 1, 2025
9e1a01a
feat: extract Quetz and Artifactory info from host
magentaqin Aug 1, 2025
676a6f7
feat: extract prefix base_url and channel from host
magentaqin Aug 4, 2025
63f33ee
feat: move pixi_progress to rattler
magentaqin Aug 4, 2025
29a1f5c
feat: extract s3 info
magentaqin Aug 4, 2025
fc4a790
feat: make host and server_type optional in UploadOpts
magentaqin Aug 4, 2025
96ea212
feat: prefix upload supports rattler_progress
magentaqin Aug 4, 2025
7718b02
fix: update rattler_progress name
magentaqin Aug 4, 2025
cfc2690
feat: detect server type and extract components from host
magentaqin Aug 4, 2025
9f3b4bb
feat: update Cargo.toml
magentaqin Aug 4, 2025
05e9c12
fix: fix Cargo.toml
magentaqin Aug 4, 2025
00d0a45
fix: fix s3 type
magentaqin Aug 4, 2025
0bf614c
fix: fix import error
magentaqin Aug 4, 2025
af31a6a
chore: cargo fmt
magentaqin Aug 4, 2025
9c8c525
fix: fix linting error
magentaqin Aug 5, 2025
1c0fdd8
fix: fix 'auth_store' type
magentaqin Aug 7, 2025
09777b0
fix: fix linting issue
magentaqin Aug 7, 2025
4573261
Merge branch 'feature/upload-auth-store' into feature/upload-url
magentaqin Aug 8, 2025
1ead736
chore: sync cargo.lock
magentaqin Aug 12, 2025
5f71202
Merge branch 'main' into feature/upload-url
magentaqin Aug 12, 2025
d23039e
refactor: replace RegExp with URL parsing
magentaqin Aug 12, 2025
0958d9f
chore: update default region
magentaqin Aug 12, 2025
db59292
Merge main
magentaqin Aug 21, 2025
8b5e6e8
fmt
magentaqin Aug 21, 2025
99dd9e7
Merge main
magentaqin Aug 26, 2025
9eadb87
refactor: remove unnecessary 'rattler_progress'
magentaqin Aug 26, 2025
bb464fb
Merge main and update s3 api
magentaqin Sep 9, 2025
c709b62
fmt
magentaqin Sep 9, 2025
5a824b4
fix: fix the region not working
magentaqin Sep 17, 2025
3ae4aef
Merge main and solve conflicts
magentaqin Sep 17, 2025
d18ba47
feat: optimize progressbar for S3 uploading and responds when package…
magentaqin Sep 23, 2025
09ddbb4
fix: fix lint err
magentaqin Sep 25, 2025
059db57
Merge branch 'feature/upload-url' of github.com:magentaqin/rattler in…
magentaqin Sep 25, 2025
16d1fd0
fix: fix S3 force option
magentaqin Sep 25, 2025
9d62564
feat: add addressing_style for 'S3Opts'
magentaqin Sep 25, 2025
e3fa364
Merge main
magentaqin Sep 29, 2025
54c5c18
Merge main
magentaqin Sep 30, 2025
9bf9bb2
Merge main
magentaqin Oct 6, 2025
5fc6195
refactor: reuse the S3CredentialsOpts
magentaqin Oct 6, 2025
14a0e82
chore: remove crate 'regex'
magentaqin Oct 6, 2025
7650fa2
remove S3 related changes for now
wolfv Oct 13, 2025
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
1 change: 1 addition & 0 deletions Cargo.lock

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.

3 changes: 1 addition & 2 deletions crates/rattler_s3/src/clap.rs
Original file line number Diff line number Diff line change
Expand Up @@ -71,8 +71,7 @@ impl From<S3CredentialsOpts> for Option<S3Credentials> {
tracing::warn!("The `--force-path-style` option is deprecated, please use `--addressing-style=path` instead.");
value.addressing_style = S3AddressingStyleOpts::Path;
}

if let (Some(endpoint_url), Some(region)) = (value.endpoint_url, value.region) {
if let (Some(endpoint_url), Some(region)) = (value.endpoint_url, Some(value.region)) {
Some(S3Credentials {
endpoint_url,
region,
Expand Down
6 changes: 3 additions & 3 deletions crates/rattler_s3/src/lib.rs
Original file line number Diff line number Diff line change
Expand Up @@ -23,14 +23,14 @@ pub enum S3AddressingStyle {
/// Rattler based crates always either use S3 credentials specified by the user
/// through CLI arguments combined with credentials coming from `rattler auth`,
/// or everything is loaded through the AWS SDK.
#[derive(Debug, Clone)]
#[derive(Debug, Clone, PartialEq)]
#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
pub struct S3Credentials {
/// The endpoint URL of the S3 backend
pub endpoint_url: Url,

/// The region of the S3 backend
pub region: String,
pub region: Option<String>,
Copy link
Collaborator

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Why is this now optional?

Copy link
Contributor Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Because it supports upload by URL now, and the region can be written in the URL. For example, the user can upload like this: "https://&lt;bucket&gt;.s3.&lt;region&gt;.amazonaws.com/my-channel"


/// The addressing style to use for the bucket.
#[cfg_attr(feature = "serde", serde(default))]
Expand Down Expand Up @@ -149,7 +149,7 @@ impl S3Credentials {

Some(ResolvedS3Credentials {
endpoint_url: self.endpoint_url,
region: self.region,
region: self.region?,
access_key_id,
secret_access_key,
session_token,
Expand Down
1 change: 1 addition & 0 deletions crates/rattler_upload/Cargo.toml
Original file line number Diff line number Diff line change
Expand Up @@ -50,6 +50,7 @@ tokio = { workspace = true, features = [
"rt-multi-thread",
"process",
] }
regex = "1.10"
Copy link
Collaborator

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Is this used?

Copy link
Contributor Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

I'll remove it. It was used before refactor.


[target.'cfg(not(target_os = "windows"))'.dependencies]
sha2 = { workspace = true, features = ["asm"] }
Expand Down
128 changes: 122 additions & 6 deletions crates/rattler_upload/src/lib.rs
Original file line number Diff line number Diff line change
@@ -1,13 +1,26 @@
pub mod upload;
pub(crate) mod utils;

use crate::upload::opt::{AnacondaOpts, ArtifactoryOpts, CondaForgeOpts, PrefixOpts};
use crate::utils::server_util::{
check_server_type, extract_anaconda_info, extract_artifactory_info, extract_conda_forge_info,
extract_prefix_info, extract_quetz_info, SimpleServerType,
};
use crate::utils::tool_configuration;
use miette::IntoDiagnostic;
use rattler_conda_types::package::ArchiveType;
use upload::opt::{
AnacondaData, ArtifactoryData, CondaForgeData, PrefixData, QuetzData, ServerType, UploadOpts,
AnacondaData, ArtifactoryData, CondaForgeData, PrefixData, QuetzData, QuetzOpts, ServerType,
UploadOpts,
};

use crate::utils::tool_configuration;
#[cfg(feature = "s3")]
use crate::upload::opt::{S3Data, S3Opts};
#[cfg(feature = "s3")]
use crate::utils::server_util::extract_s3_info;
#[cfg(feature = "s3")]
use rattler_s3::clap::S3AddressingStyleOpts::VirtualHost;

/// Upload package to different channels
pub async fn upload_from_args(args: UploadOpts) -> miette::Result<()> {
// Validate package files are provided
Expand All @@ -29,8 +42,109 @@ pub async fn upload_from_args(args: UploadOpts) -> miette::Result<()> {
let store = tool_configuration::get_auth_store(args.common.auth_file, args.auth_store)
.into_diagnostic()?;

// Check server type from host (if provided)
let detected_type: SimpleServerType = match &args.host {
Some(host_url) => check_server_type(host_url),
None => SimpleServerType::Unknown,
};

// Use detected type if available, otherwise fall back to provided server_type
let server_type = match detected_type {
SimpleServerType::Unknown => {
// If detection failed, use provided subcommand server_type or return error
match args.server_type {
Some(server_type) => server_type,
None => {
return Err(std::io::Error::new(
std::io::ErrorKind::InvalidInput,
"Cannot determine server type from host and no server type provided",
))
.into_diagnostic()
}
}
}
SimpleServerType::Quetz => {
let host_url = args.host.as_ref().unwrap();
let (base_url, channel) =
extract_quetz_info(host_url).expect("Failed to parse Quetz URL");
ServerType::Quetz(QuetzOpts {
url: base_url,
channels: channel,
api_key: None,
})
}
SimpleServerType::Artifactory => {
let host_url = args.host.as_ref().unwrap();
let (base_url, channel) =
extract_artifactory_info(host_url).expect("Failed to parse Artifactory URL");
ServerType::Artifactory(ArtifactoryOpts {
url: base_url,
channels: channel,
username: None,
password: None,
token: None,
})
}
SimpleServerType::Prefix => {
let host_url = args.host.as_ref().unwrap();
let (base_url, channel) =
extract_prefix_info(host_url).expect("Failed to parse Prefix URL");
ServerType::Prefix(PrefixOpts {
Copy link
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Shouldn't we fill in e.g.. API key from the args here?

Copy link
Contributor Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Hi, Wolf. There are three things I need to explain.

  1. According to pixi doc, upload doesn't have api_key arg. https://pixi.sh/latest/reference/cli/pixi/upload/#description

  2. The common args were designed like this (without an api_key option).

Screenshot 2025-09-23 at 09 39 54
  3. There are two ways to upload packages: by URL and by subcommand.

Only when you specify the subcommand as "prefix" can you pass the api_key arg. But in our case, we upload the package through a URL (prefix pattern), so api_key is not allowed to be passed through the args. Users must pass the token by setting it locally.

url: base_url,
channel,
api_key: None,
attestation: None,
skip_existing: false,
})
}
SimpleServerType::Anaconda => {
let host_url = args.host.as_ref().unwrap();
let (base_url, channel) =
extract_anaconda_info(host_url).expect("Failed to parse Anaconda URL");
ServerType::Anaconda(AnacondaOpts {
url: Some(base_url),
channels: Some(channel),
api_key: None,
owner: "".to_string(),
force: false,
})
}
#[cfg(feature = "s3")]
SimpleServerType::S3 => {
let host_url = args.host.as_ref().unwrap();
let (endpoint_url, channel, region) =
extract_s3_info(host_url).expect("Failed to parse S3 URL");
ServerType::S3(S3Opts {
channel,
endpoint_url,
region,
force_path_style: false,
access_key_id: None,
secret_access_key: None,
session_token: None,
credentials: None,
force: false,
addressing_style: VirtualHost,
})
}
SimpleServerType::CondaForge => {
let host_url = args.host.as_ref().unwrap();
let (base_url, channel) =
extract_conda_forge_info(host_url).expect("Failed to parse Conda Forge URL");
ServerType::CondaForge(CondaForgeOpts {
anaconda_url: Some(base_url),
staging_channel: Some(channel),
staging_token: "".to_string(),
feedstock: "".to_string(),
feedstock_token: "".to_string(),
validation_endpoint: None,
provider: None,
dry_run: false,
})
}
};
// Upload handler based on server type
match args.server_type {
match server_type {
ServerType::Quetz(quetz_opts) => {
let quetz_data = QuetzData::from(quetz_opts);
upload::upload_package_to_quetz(&store, &args.package_files, quetz_data).await
Expand All @@ -51,12 +165,14 @@ pub async fn upload_from_args(args: UploadOpts) -> miette::Result<()> {
}
#[cfg(feature = "s3")]
ServerType::S3(s3_opts) => {
let s3_data = S3Data::from(s3_opts);
upload::upload_package_to_s3(
&store,
s3_opts.channel,
s3_opts.credentials.into(),
s3_data.channel,
s3_data.credentials,
&args.package_files,
s3_opts.force,
s3_data.region,
s3_data.force, // force parameter - using false as default
)
.await
}
Expand Down
Loading
Loading