Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
Show all changes
44 commits
Select commit Hold shift + click to select a range
c2ecece
feat: add auth_store for UploadOpts and update get_auth_store
magentaqin Jul 31, 2025
7796ddd
fix: fix linting
magentaqin Jul 31, 2025
d05406c
feat: add regex package
magentaqin Aug 1, 2025
a490759
feat: check server type by host
magentaqin Aug 1, 2025
9e1a01a
feat: extract Quetz and Artifactory info from host
magentaqin Aug 1, 2025
676a6f7
feat: extract prefix base_url and channel from host
magentaqin Aug 4, 2025
63f33ee
feat: move pixi_progress to rattler
magentaqin Aug 4, 2025
29a1f5c
feat: extract s3 info
magentaqin Aug 4, 2025
fc4a790
feat: make host and server_type optional in UploadOpts
magentaqin Aug 4, 2025
96ea212
feat: prefix upload supports rattler_progress
magentaqin Aug 4, 2025
7718b02
fix: update rattler_progress name
magentaqin Aug 4, 2025
cfc2690
feat: detect server type and extract components from host
magentaqin Aug 4, 2025
9f3b4bb
feat: update Cargo.toml
magentaqin Aug 4, 2025
05e9c12
fix: fix Cargo.toml
magentaqin Aug 4, 2025
00d0a45
fix: fix s3 type
magentaqin Aug 4, 2025
0bf614c
fix: fix import error
magentaqin Aug 4, 2025
af31a6a
chore: cargo fmt
magentaqin Aug 4, 2025
9c8c525
fix: fix linting error
magentaqin Aug 5, 2025
1c0fdd8
fix: fix 'auth_store' type
magentaqin Aug 7, 2025
09777b0
fix: fix linting issue
magentaqin Aug 7, 2025
4573261
Merge branch 'feature/upload-auth-store' into feature/upload-url
magentaqin Aug 8, 2025
1ead736
chore: sync cargo.lock
magentaqin Aug 12, 2025
5f71202
Merge branch 'main' into feature/upload-url
magentaqin Aug 12, 2025
d23039e
refactor: replace RegExp with URL parsing
magentaqin Aug 12, 2025
0958d9f
chore: update default region
magentaqin Aug 12, 2025
db59292
Merge main
magentaqin Aug 21, 2025
8b5e6e8
fmt
magentaqin Aug 21, 2025
99dd9e7
Merge main
magentaqin Aug 26, 2025
9eadb87
refactor: remove unecessary 'rattler_progress'
magentaqin Aug 26, 2025
bb464fb
Merge main and update s3 api
magentaqin Sep 9, 2025
c709b62
fmt
magentaqin Sep 9, 2025
5a824b4
fix: fix the region not working
magentaqin Sep 17, 2025
3ae4aef
Merge main and solve conflicts
magentaqin Sep 17, 2025
d18ba47
feat: optimize progressbar for S3 uploading and responds when package…
magentaqin Sep 23, 2025
09ddbb4
fix: fix lint err
magentaqin Sep 25, 2025
059db57
Merge branch 'feature/upload-url' of github.com:magentaqin/rattler in…
magentaqin Sep 25, 2025
16d1fd0
fix: fix S3 force option
magentaqin Sep 25, 2025
9d62564
feat: add addressing_style for 'S3Opts'
magentaqin Sep 25, 2025
e3fa364
Merge main
magentaqin Sep 29, 2025
54c5c18
Merge main
magentaqin Sep 30, 2025
9bf9bb2
Merge main
magentaqin Oct 6, 2025
5fc6195
refactor: reuse the S3CredentialsOpts
magentaqin Oct 6, 2025
14a0e82
chore: remove crate 'regex'
magentaqin Oct 6, 2025
7650fa2
remove S3 related changes for now
wolfv Oct 13, 2025
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
2 changes: 1 addition & 1 deletion crates/rattler_s3/src/lib.rs
Original file line number Diff line number Diff line change
Expand Up @@ -23,7 +23,7 @@ pub enum S3AddressingStyle {
/// Rattler based crates always either use S3 credentials specified by the user
/// through CLI arguments combined with credentials coming from `rattler auth`,
/// or everything is loaded through the AWS SDK.
#[derive(Debug, Clone)]
#[derive(Debug, Clone, PartialEq)]
#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
pub struct S3Credentials {
/// The endpoint URL of the S3 backend
Expand Down
129 changes: 123 additions & 6 deletions crates/rattler_upload/src/lib.rs
Original file line number Diff line number Diff line change
@@ -1,13 +1,26 @@
pub mod upload;
pub(crate) mod utils;

use crate::upload::opt::{AnacondaOpts, ArtifactoryOpts, CondaForgeOpts, PrefixOpts};
use crate::utils::server_util::{
check_server_type, extract_anaconda_info, extract_artifactory_info, extract_conda_forge_info,
extract_prefix_info, extract_quetz_info, SimpleServerType,
};
use crate::utils::tool_configuration;
use miette::IntoDiagnostic;
use rattler_conda_types::package::ArchiveType;
use upload::opt::{
AnacondaData, ArtifactoryData, CondaForgeData, PrefixData, QuetzData, ServerType, UploadOpts,
AnacondaData, ArtifactoryData, CondaForgeData, PrefixData, QuetzData, QuetzOpts, ServerType,
UploadOpts,
};

use crate::utils::tool_configuration;
#[cfg(feature = "s3")]
use crate::upload::opt::{S3Data, S3Opts};
#[cfg(feature = "s3")]
use crate::utils::server_util::extract_s3_info;
#[cfg(feature = "s3")]
use rattler_s3::clap::{S3AddressingStyleOpts, S3CredentialsOpts};

/// Upload package to different channels
pub async fn upload_from_args(args: UploadOpts) -> miette::Result<()> {
// Validate package files are provided
Expand All @@ -29,8 +42,111 @@ pub async fn upload_from_args(args: UploadOpts) -> miette::Result<()> {
let store = tool_configuration::get_auth_store(args.common.auth_file, args.auth_store)
.into_diagnostic()?;

// Check server type from host (if provided)
let detected_type: SimpleServerType = match &args.host {
Some(host_url) => check_server_type(host_url),
None => SimpleServerType::Unknown,
};

// Use detected type if available, otherwise fall back to provided server_type
let server_type = match detected_type {
SimpleServerType::Unknown => {
// If detection failed, use provided subcommand server_type or return error
match args.server_type {
Some(server_type) => server_type,
None => {
return Err(std::io::Error::new(
std::io::ErrorKind::InvalidInput,
"Cannot determine server type from host and no server type provided",
))
.into_diagnostic()
}
}
}
SimpleServerType::Quetz => {
let host_url = args.host.as_ref().unwrap();
let (base_url, channel) =
extract_quetz_info(host_url).expect("Failed to parse Quetz URL");
ServerType::Quetz(QuetzOpts {
url: base_url,
channels: channel,
api_key: None,
})
}
SimpleServerType::Artifactory => {
let host_url = args.host.as_ref().unwrap();
let (base_url, channel) =
extract_artifactory_info(host_url).expect("Failed to parse Artifactory URL");
ServerType::Artifactory(ArtifactoryOpts {
url: base_url,
channels: channel,
username: None,
password: None,
token: None,
})
}
SimpleServerType::Prefix => {
let host_url = args.host.as_ref().unwrap();
let (base_url, channel) =
extract_prefix_info(host_url).expect("Failed to parse Prefix URL");
ServerType::Prefix(PrefixOpts {
Copy link
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Shouldn't we fill in e.g.. API key from the args here?

Copy link
Contributor Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Hi, Wolf. There're three things I need to explain.

  1. According to pixi doc, upload doesn't have api_key arg. https://pixi.sh/latest/reference/cli/pixi/upload/#description

  2. The common args were designed like this (without an api_key option).

Screenshot 2025-09-23 at 09 39 54
  3. There are two ways to upload packages: URL and subcommand.

Only when you specify the subcommand as "prefix", can you pass the arg api_key. But in our case, we upload package through URL(prefix pattern), api_key is not allowed to pass through the args. Users must pass the token by setting them locally.

url: base_url,
channel,
api_key: None,
attestation: None,
skip_existing: false,
})
}
SimpleServerType::Anaconda => {
let host_url = args.host.as_ref().unwrap();
let (base_url, channel) =
extract_anaconda_info(host_url).expect("Failed to parse Anaconda URL");
ServerType::Anaconda(AnacondaOpts {
url: Some(base_url),
channels: Some(channel),
api_key: None,
owner: "".to_string(),
force: false,
})
}
#[cfg(feature = "s3")]
SimpleServerType::S3 => {
let host_url = args.host.as_ref().unwrap();
let (endpoint_url, channel, region) =
extract_s3_info(host_url).expect("Failed to parse S3 URL");
ServerType::S3(S3Opts {
channel,
s3_credentials: S3CredentialsOpts {
endpoint_url: Some(endpoint_url),
region: Some(region),
access_key_id: None,
secret_access_key: None,
session_token: None,
addressing_style: S3AddressingStyleOpts::VirtualHost,
force_path_style: None,
},
credentials: None,
force: false,
})
}
SimpleServerType::CondaForge => {
let host_url = args.host.as_ref().unwrap();
let (base_url, channel) =
extract_conda_forge_info(host_url).expect("Failed to parse Conda Forge URL");
ServerType::CondaForge(CondaForgeOpts {
anaconda_url: Some(base_url),
staging_channel: Some(channel),
staging_token: "".to_string(),
feedstock: "".to_string(),
feedstock_token: "".to_string(),
validation_endpoint: None,
provider: None,
dry_run: false,
})
}
};
// Upload handler based on server type
match args.server_type {
match server_type {
ServerType::Quetz(quetz_opts) => {
let quetz_data = QuetzData::from(quetz_opts);
upload::upload_package_to_quetz(&store, &args.package_files, quetz_data).await
Expand All @@ -51,12 +167,13 @@ pub async fn upload_from_args(args: UploadOpts) -> miette::Result<()> {
}
#[cfg(feature = "s3")]
ServerType::S3(s3_opts) => {
let s3_data = S3Data::from(s3_opts);
upload::upload_package_to_s3(
&store,
s3_opts.channel,
s3_opts.credentials.into(),
s3_data.channel,
s3_data.credentials,
&args.package_files,
s3_opts.force,
s3_data.force, // force parameter - using false as default
)
.await
}
Expand Down
131 changes: 109 additions & 22 deletions crates/rattler_upload/src/upload/opt.rs
Original file line number Diff line number Diff line change
@@ -1,19 +1,22 @@
//! Command-line options.
use std::{collections::HashMap, path::PathBuf, str::FromStr};

use clap::{arg, Parser};
use rattler_conda_types::{
utils::url_with_trailing_slash::UrlWithTrailingSlash, NamedChannelOrUrl, Platform,
};
use rattler_conda_types::utils::url_with_trailing_slash::UrlWithTrailingSlash;
use rattler_conda_types::{NamedChannelOrUrl, Platform};
use rattler_networking::mirror_middleware;
use rattler_networking::AuthenticationStorage;
#[cfg(feature = "s3")]
use rattler_networking::s3_middleware;
use rattler_networking::{mirror_middleware, AuthenticationStorage};
use rattler_s3::clap::S3CredentialsOpts;
#[cfg(feature = "s3")]
use rattler_s3::{S3AddressingStyle, S3Credentials};
use rattler_solve::ChannelPriority;
use std::{collections::HashMap, path::PathBuf, str::FromStr};
use tracing::warn;
use url::Url;

/// The configuration type for rattler-build - just extends rattler / pixi
/// config and can load the same TOML files.
#[cfg(feature = "s3")]
use rattler_networking::s3_middleware;

/// The configuration type for rattler-build - just extends rattler / pixi config and can load the same TOML files.
pub type Config = rattler_config::config::ConfigBase<()>;

/// Container for `rattler_solver::ChannelPriority` so that it can be parsed
Expand Down Expand Up @@ -99,8 +102,7 @@ impl CommonData {
allow_insecure_host: Option<Vec<String>>,
) -> Self {
// mirror config
// todo: this is a duplicate in pixi and pixi-pack: do it like in
// `compute_s3_config`
// todo: this is a duplicate in pixi and pixi-pack: do it like in `compute_s3_config`
let mut mirror_config = HashMap::new();
tracing::debug!("Using mirrors: {:?}", config.mirrors);

Expand Down Expand Up @@ -158,13 +160,16 @@ impl CommonData {
/// Upload options.
#[derive(Parser, Debug)]
pub struct UploadOpts {
/// The host + channel (optional if the server type is provided)
pub host: Option<Url>,
Copy link
Collaborator

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

There are clap arguments here to make sure that if host is provided then channel must also be provided, or that it conflicts with server_type. Use those.

Copy link
Contributor Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Hi, Bas! Nice catch! I mean the hostURL should consider "host" + "channel", and in check_server_type(https://github.com/magentaqin/rattler/blob/14a0e826791da57dbe5e02db24597b6cf5b8873a/crates/rattler_upload/src/utils/server_util.rs#L25), I will check whether the URL contains channel.
But you're right, I should use clap arguments to make "host" and "server_type" mutually exclusive.


/// The package file to upload
#[arg(global = true, required = false)]
pub package_files: Vec<PathBuf>,

/// The server type
//// The server type (optional if host is provided)
#[clap(subcommand)]
pub server_type: ServerType,
pub server_type: Option<ServerType>,

/// Common options.
#[clap(flatten)]
Expand Down Expand Up @@ -426,19 +431,102 @@ fn parse_s3_url(value: &str) -> Result<Url, String> {
#[cfg(feature = "s3")]
#[derive(Clone, Debug, PartialEq, Parser)]
pub struct S3Opts {
Copy link
Collaborator

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

You can reuse the S3CredentialsOpts from crates/rattler_s3/src/clap.rs here to get a consistent CLI.

Copy link
Contributor Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

will do it! thanks for reminding!

/// The channel URL in the S3 bucket to upload the package to, e.g.,
/// `s3://my-bucket/my-channel`
/// The channel URL in the S3 bucket to upload the package to, e.g., `s3://my-bucket/my-channel`
#[arg(short, long, env = "S3_CHANNEL", value_parser = parse_s3_url)]
pub channel: Url,

/// S3 credentials
#[clap(flatten)]
pub credentials: rattler_s3::clap::S3CredentialsOpts,
pub s3_credentials: S3CredentialsOpts,

/// Replace files if it already exists.
#[arg(long)]
/// S3 credentials (set programmatically, not via CLI)
#[clap(skip)]
pub credentials: Option<S3Credentials>,

/// Replace files on conflict
#[arg(long, short, env = "ANACONDA_FORCE")]
pub force: bool,
}

#[cfg(feature = "s3")]
#[derive(Debug)]
#[allow(missing_docs)]
pub struct S3Data {
Copy link
Collaborator

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

What is the purpose of this struct? The name is quite ambigous. Also, doesnt the rattler_s3 crate provide similar functionality?

Copy link
Contributor Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

You're right, Bas. S3Data was migrated before we had rattler_s3 crate. Let me see how to reuse the functionality!

pub channel: Url,
pub endpoint_url: Url,
pub region: Option<String>,
pub force_path_style: bool,
pub credentials: Option<S3Credentials>,
pub force: bool,
}

#[cfg(feature = "s3")]
impl From<S3Opts> for S3Data {
    /// Resolve CLI options into the data needed for an S3 upload.
    ///
    /// If both an access key id and a secret access key were supplied via the
    /// CLI, a fresh [`S3Credentials`] is built from the CLI options; otherwise
    /// the programmatically supplied `credentials` (if any) are used as-is.
    ///
    /// # Panics
    ///
    /// Panics if `endpoint_url` is missing, or if CLI credentials are used and
    /// `region` is missing.
    fn from(value: S3Opts) -> Self {
        // Destructure once so fields can be moved instead of cloned.
        let S3Opts {
            channel,
            s3_credentials,
            credentials: fallback_credentials,
            force,
        } = value;

        let addressing_style: S3AddressingStyle = s3_credentials.addressing_style.into();
        let force_path_style = matches!(addressing_style, S3AddressingStyle::Path);

        // The endpoint is required regardless of which credential source wins,
        // so resolve (and panic on absence) exactly once.
        let endpoint_url = s3_credentials
            .endpoint_url
            .expect("endpoint_url is required");

        // CLI-provided key pair takes precedence over programmatic credentials.
        let credentials = match (
            s3_credentials.access_key_id,
            s3_credentials.secret_access_key,
        ) {
            (Some(access_key_id), Some(secret_access_key)) => Some(S3Credentials {
                endpoint_url: endpoint_url.clone(),
                region: s3_credentials
                    .region
                    .clone()
                    .expect("region is required"),
                addressing_style,
                access_key_id: Some(access_key_id),
                secret_access_key: Some(secret_access_key),
                session_token: s3_credentials.session_token,
            }),
            _ => fallback_credentials,
        };

        Self {
            channel,
            endpoint_url,
            region: s3_credentials.region,
            force_path_style,
            credentials,
            force,
        }
    }
}

#[cfg(feature = "s3")]
impl S3Data {
    /// Create a new instance of `S3Data`.
    ///
    /// Plain constructor that stores each argument verbatim; no validation or
    /// credential resolution is performed here (contrast with the
    /// `From<S3Opts>` conversion, which resolves CLI options first).
    ///
    /// * `channel` - channel URL inside the S3 bucket to upload to
    /// * `endpoint_url` - endpoint URL of the S3 backend
    /// * `region` - optional S3 region
    /// * `force_path_style` - use path-style instead of virtual-host addressing
    /// * `credentials` - optional explicit credentials (AWS SDK is used otherwise)
    /// * `force` - replace files that already exist
    pub fn new(
        channel: Url,
        endpoint_url: Url,
        region: Option<String>,
        force_path_style: bool,
        credentials: Option<S3Credentials>,
        force: bool,
    ) -> Self {
        Self {
            channel,
            endpoint_url,
            region,
            force_path_style,
            credentials,
            force,
        }
    }
}

#[derive(Debug)]
#[allow(missing_docs)]
pub struct AnacondaData {
Expand Down Expand Up @@ -607,8 +695,7 @@ pub struct DebugOpts {
#[clap(flatten)]
pub common: CommonOpts,

/// Name of the specific output to debug (only required when a recipe has
/// multiple outputs)
/// Name of the specific output to debug (only required when a recipe has multiple outputs)
#[arg(long, help = "Name of the specific output to debug")]
pub output_name: Option<String>,
}
Expand All @@ -635,8 +722,8 @@ pub struct DebugData {
}

impl DebugData {
/// Generate a new `TestData` struct from `TestOpts` and an optional pixi
/// config. `TestOpts` have higher priority than the pixi config.
/// Generate a new `TestData` struct from `TestOpts` and an optional pixi config.
/// `TestOpts` have higher priority than the pixi config.
pub fn from_opts_and_config(opts: DebugOpts, config: Option<Config>) -> Self {
Self {
recipe_path: opts.recipe,
Expand Down
Loading