From 8e67af9cb0bc5ac276c27c16e7685e943671d8ae Mon Sep 17 00:00:00 2001 From: Takayuki Maeda Date: Sat, 18 Oct 2025 02:38:48 +0900 Subject: [PATCH 1/4] update schemars to v1 update ty schema --- Cargo.lock | 33 +- Cargo.toml | 2 +- crates/ruff_linter/src/rule_selector.rs | 114 +-- crates/ruff_linter/src/settings/types.rs | 8 +- crates/ruff_python_ast/Cargo.toml | 3 +- crates/ruff_python_ast/src/name.rs | 22 +- crates/ruff_python_ast/src/python_version.rs | 61 +- crates/ruff_python_formatter/Cargo.toml | 3 +- crates/ruff_python_formatter/src/options.rs | 30 +- crates/ruff_python_semantic/src/imports.rs | 12 +- crates/ruff_text_size/src/schemars_impls.rs | 11 +- crates/ruff_workspace/Cargo.toml | 2 + crates/ruff_workspace/src/options.rs | 49 +- crates/ty_project/Cargo.toml | 2 + crates/ty_project/src/metadata/options.rs | 117 ++- crates/ty_project/src/metadata/value.rs | 29 +- crates/ty_python_semantic/Cargo.toml | 2 + .../ty_python_semantic/src/python_platform.rs | 91 +- ruff.schema.json | 857 +++++++++--------- ty.schema.json | 373 ++++---- 20 files changed, 910 insertions(+), 911 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index e5fe849913a163..6cd97bd2ec5c87 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -2767,6 +2767,26 @@ dependencies = [ "thiserror 2.0.16", ] +[[package]] +name = "ref-cast" +version = "1.0.25" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f354300ae66f76f1c85c5f84693f0ce81d747e2c3f21a45fef496d89c960bf7d" +dependencies = [ + "ref-cast-impl", +] + +[[package]] +name = "ref-cast-impl" +version = "1.0.25" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b7186006dcb21920990093f30e3dea63b7d6e977bf1256be20c3563a5db070da" +dependencies = [ + "proc-macro2", + "quote", + "syn", +] + [[package]] name = "regex" version = "1.11.3" @@ -3197,6 +3217,7 @@ dependencies = [ "salsa", "schemars", "serde", + "serde_json", "thiserror 2.0.16", ] @@ -3484,6 +3505,7 @@ dependencies = [ "rustc-hash", "schemars", "serde", + "serde_json", "shellexpand", "strum", "tempfile", @@ -3589,11 +3611,12 @@ dependencies = [ [[package]] name = "schemars" -version = "0.8.22" +version = "1.0.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3fbf2ae1b8bc8e02df939598064d22402220cd5bbcca1c76f7d6a310974d5615" +checksum = "82d20c4491bc164fa2f6c5d44565947a52ad80b9505d8e36f8d54c27c739fcd0" dependencies = [ "dyn-clone", + "ref-cast", "schemars_derive", "serde", "serde_json", @@ -3601,9 +3624,9 @@ dependencies = [ [[package]] name = "schemars_derive" -version = "0.8.22" +version = "1.0.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "32e265784ad618884abaea0600a9adf15393368d840e0222d101a072f3f7534d" +checksum = "33d020396d1d138dc19f1165df7545479dcd58d93810dc5d646a16e55abefa80" dependencies = [ "proc-macro2", "quote", @@ -4384,6 +4407,7 @@ dependencies = [ "salsa", "schemars", "serde", + "serde_json", "thiserror 2.0.16", "toml", "tracing", @@ -4431,6 +4455,7 @@ dependencies = [ "salsa", "schemars", "serde", + "serde_json", "smallvec", "static_assertions", "strsim", diff --git a/Cargo.toml b/Cargo.toml index ad61751a7df24b..5f1112913e4b52 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -152,7 +152,7 @@ salsa = { git = "https://github.com/salsa-rs/salsa.git", rev = "ef9f9329be6923ac "salsa_unstable", "inventory", ] } -schemars = { version = "0.8.16" } +schemars = { version = "1.0.4" } seahash = { version = "4.1.0" } serde = { version = "1.0.197", features = ["derive"] } serde-wasm-bindgen = { version 
= "0.6.4" } diff --git a/crates/ruff_linter/src/rule_selector.rs b/crates/ruff_linter/src/rule_selector.rs index 399c881a340324..b0417f4c1d6893 100644 --- a/crates/ruff_linter/src/rule_selector.rs +++ b/crates/ruff_linter/src/rule_selector.rs @@ -257,9 +257,8 @@ pub struct PreviewOptions { #[cfg(feature = "schemars")] mod schema { use itertools::Itertools; - use schemars::_serde_json::Value; - use schemars::JsonSchema; - use schemars::schema::{InstanceType, Schema, SchemaObject}; + use schemars::{JsonSchema, Schema, SchemaGenerator}; + use serde_json::Value; use strum::IntoEnumIterator; use crate::RuleSelector; @@ -267,64 +266,65 @@ mod schema { use crate::rule_selector::{Linter, RuleCodePrefix}; impl JsonSchema for RuleSelector { - fn schema_name() -> String { - "RuleSelector".to_string() + fn schema_name() -> std::borrow::Cow<'static, str> { + std::borrow::Cow::Borrowed("RuleSelector") } - fn json_schema(_gen: &mut schemars::r#gen::SchemaGenerator) -> Schema { - Schema::Object(SchemaObject { - instance_type: Some(InstanceType::String.into()), - enum_values: Some( - [ - // Include the non-standard "ALL" selectors. - "ALL".to_string(), - // Include the legacy "C" and "T" selectors. - "C".to_string(), - "T".to_string(), - // Include some common redirect targets for those legacy selectors. - "C9".to_string(), - "T1".to_string(), - "T2".to_string(), - ] - .into_iter() - .chain( - RuleCodePrefix::iter() - .map(|p| { - let prefix = p.linter().common_prefix(); - let code = p.short_code(); - format!("{prefix}{code}") - }) - .chain(Linter::iter().filter_map(|l| { - let prefix = l.common_prefix(); - (!prefix.is_empty()).then(|| prefix.to_string()) - })), - ) - .filter(|p| { - // Exclude any prefixes where all of the rules are removed - if let Ok(Self::Rule { prefix, .. } | Self::Prefix { prefix, .. }) = - RuleSelector::parse_no_redirect(p) - { - !prefix.rules().all(|rule| rule.is_removed()) - } else { - true - } + fn json_schema(_gen: &mut SchemaGenerator) -> Schema { + let enum_values: Vec = [ + // Include the non-standard "ALL" selectors. + "ALL".to_string(), + // Include the legacy "C" and "T" selectors. + "C".to_string(), + "T".to_string(), + // Include some common redirect targets for those legacy selectors. + "C9".to_string(), + "T1".to_string(), + "T2".to_string(), + ] + .into_iter() + .chain( + RuleCodePrefix::iter() + .map(|p| { + let prefix = p.linter().common_prefix(); + let code = p.short_code(); + format!("{prefix}{code}") }) - .filter(|_rule| { - // Filter out all test-only rules - #[cfg(any(feature = "test-rules", test))] - #[expect(clippy::used_underscore_binding)] - if _rule.starts_with("RUF9") || _rule == "PLW0101" { - return false; - } - - true - }) - .sorted() - .map(Value::String) - .collect(), - ), - ..SchemaObject::default() + .chain(Linter::iter().filter_map(|l| { + let prefix = l.common_prefix(); + (!prefix.is_empty()).then(|| prefix.to_string()) + })), + ) + .filter(|p| { + // Exclude any prefixes where all of the rules are removed + if let Ok(Self::Rule { prefix, .. } | Self::Prefix { prefix, .. 
}) = + RuleSelector::parse_no_redirect(p) + { + !prefix.rules().all(|rule| rule.is_removed()) + } else { + true + } }) + .filter(|_rule| { + // Filter out all test-only rules + #[cfg(any(feature = "test-rules", test))] + #[expect(clippy::used_underscore_binding)] + if _rule.starts_with("RUF9") || _rule == "PLW0101" { + return false; + } + + true + }) + .sorted() + .collect(); + + let mut schema = schemars::json_schema!({ "type": "string" }); + schema.ensure_object().insert( + "enum".to_string(), + Value::Array(enum_values.into_iter().map(Value::String).collect()), + ); + + schema } } } diff --git a/crates/ruff_linter/src/settings/types.rs b/crates/ruff_linter/src/settings/types.rs index 1331035345b676..05cd13909b69bf 100644 --- a/crates/ruff_linter/src/settings/types.rs +++ b/crates/ruff_linter/src/settings/types.rs @@ -617,12 +617,12 @@ impl TryFrom for RequiredVersion { #[cfg(feature = "schemars")] impl schemars::JsonSchema for RequiredVersion { - fn schema_name() -> String { - "RequiredVersion".to_string() + fn schema_name() -> std::borrow::Cow<'static, str> { + std::borrow::Cow::Borrowed("RequiredVersion") } - fn json_schema(generator: &mut schemars::r#gen::SchemaGenerator) -> schemars::schema::Schema { - generator.subschema_for::() + fn json_schema(generator: &mut schemars::SchemaGenerator) -> schemars::Schema { + ::json_schema(generator) } } diff --git a/crates/ruff_python_ast/Cargo.toml b/crates/ruff_python_ast/Cargo.toml index 5a50f4ee74c02c..062870d3e4b1e8 100644 --- a/crates/ruff_python_ast/Cargo.toml +++ b/crates/ruff_python_ast/Cargo.toml @@ -30,10 +30,11 @@ rustc-hash = { workspace = true } salsa = { workspace = true, optional = true } schemars = { workspace = true, optional = true } serde = { workspace = true, optional = true } +serde_json = { workspace = true, optional = true } thiserror = { workspace = true } [features] -schemars = ["dep:schemars"] +schemars = ["dep:schemars", "dep:serde_json"] cache = ["dep:ruff_cache", "dep:ruff_macros"] serde = [ "dep:serde", diff --git a/crates/ruff_python_ast/src/name.rs b/crates/ruff_python_ast/src/name.rs index a1e1376282c96e..143aa6129ab17d 100644 --- a/crates/ruff_python_ast/src/name.rs +++ b/crates/ruff_python_ast/src/name.rs @@ -203,30 +203,26 @@ impl PartialEq for &String { #[cfg(feature = "schemars")] impl schemars::JsonSchema for Name { - fn is_referenceable() -> bool { - String::is_referenceable() - } - - fn schema_name() -> String { - String::schema_name() + fn schema_name() -> std::borrow::Cow<'static, str> { + ::schema_name() } fn schema_id() -> std::borrow::Cow<'static, str> { - String::schema_id() + ::schema_id() } - fn json_schema(generator: &mut schemars::r#gen::SchemaGenerator) -> schemars::schema::Schema { - String::json_schema(generator) + fn json_schema(generator: &mut schemars::SchemaGenerator) -> schemars::Schema { + ::json_schema(generator) } fn _schemars_private_non_optional_json_schema( - generator: &mut schemars::r#gen::SchemaGenerator, - ) -> schemars::schema::Schema { - String::_schemars_private_non_optional_json_schema(generator) + generator: &mut schemars::SchemaGenerator, + ) -> schemars::Schema { + ::_schemars_private_non_optional_json_schema(generator) } fn _schemars_private_is_option() -> bool { - String::_schemars_private_is_option() + ::_schemars_private_is_option() } } diff --git a/crates/ruff_python_ast/src/python_version.rs b/crates/ruff_python_ast/src/python_version.rs index 82f34f2a484567..71fb99f27a528d 100644 --- a/crates/ruff_python_ast/src/python_version.rs +++ 
b/crates/ruff_python_ast/src/python_version.rs @@ -188,42 +188,39 @@ mod serde { #[cfg(feature = "schemars")] mod schemars { use super::PythonVersion; - use schemars::_serde_json::Value; - use schemars::JsonSchema; - use schemars::schema::{Metadata, Schema, SchemaObject, SubschemaValidation}; + use schemars::{JsonSchema, Schema, SchemaGenerator}; + use serde_json::Value; impl JsonSchema for PythonVersion { - fn schema_name() -> String { - "PythonVersion".to_string() + fn schema_name() -> std::borrow::Cow<'static, str> { + std::borrow::Cow::Borrowed("PythonVersion") } - fn json_schema(_gen: &mut schemars::r#gen::SchemaGenerator) -> Schema { - let sub_schemas = std::iter::once(Schema::Object(SchemaObject { - instance_type: Some(schemars::schema::InstanceType::String.into()), - string: Some(Box::new(schemars::schema::StringValidation { - pattern: Some(r"^\d+\.\d+$".to_string()), - ..Default::default() - })), - ..Default::default() - })) - .chain(Self::iter().map(|v| { - Schema::Object(SchemaObject { - const_value: Some(Value::String(v.to_string())), - metadata: Some(Box::new(Metadata { - description: Some(format!("Python {v}")), - ..Metadata::default() - })), - ..SchemaObject::default() - }) - })); - - Schema::Object(SchemaObject { - subschemas: Some(Box::new(SubschemaValidation { - any_of: Some(sub_schemas.collect()), - ..Default::default() - })), - ..SchemaObject::default() - }) + fn json_schema(_gen: &mut SchemaGenerator) -> Schema { + let string_with_pattern = schemars::json_schema!({ + "type": "string", + "pattern": r"^\\d+\\.\\d+$", + }); + + let mut any_of: Vec = Vec::new(); + any_of.push(string_with_pattern.into()); + + for version in Self::iter() { + let mut schema = schemars::json_schema!({ + "const": version.to_string(), + }); + schema.ensure_object().insert( + "description".to_string(), + Value::String(format!("Python {version}")), + ); + any_of.push(schema.into()); + } + + let mut schema = Schema::default(); + schema + .ensure_object() + .insert("anyOf".to_string(), Value::Array(any_of)); + schema } } } diff --git a/crates/ruff_python_formatter/Cargo.toml b/crates/ruff_python_formatter/Cargo.toml index 1f351ca6223582..95ca60ee10ebcd 100644 --- a/crates/ruff_python_formatter/Cargo.toml +++ b/crates/ruff_python_formatter/Cargo.toml @@ -34,6 +34,7 @@ rustc-hash = { workspace = true } salsa = { workspace = true } serde = { workspace = true, optional = true } schemars = { workspace = true, optional = true } +serde_json = { workspace = true, optional = true } smallvec = { workspace = true } static_assertions = { workspace = true } thiserror = { workspace = true } @@ -66,7 +67,7 @@ serde = [ "ruff_source_file/serde", "ruff_python_ast/serde", ] -schemars = ["dep:schemars", "ruff_formatter/schemars"] +schemars = ["dep:schemars", "dep:serde_json", "ruff_formatter/schemars"] [lints] workspace = true diff --git a/crates/ruff_python_formatter/src/options.rs b/crates/ruff_python_formatter/src/options.rs index ec84fa65fba5a0..5d19dec9cb85aa 100644 --- a/crates/ruff_python_formatter/src/options.rs +++ b/crates/ruff_python_formatter/src/options.rs @@ -403,15 +403,12 @@ pub enum DocstringCodeLineWidth { #[cfg(feature = "schemars")] mod schema { use ruff_formatter::LineWidth; - use schemars::r#gen::SchemaGenerator; - use schemars::schema::{Metadata, Schema, SubschemaValidation}; + use schemars::{Schema, SchemaGenerator}; + use serde_json::Value; /// A dummy type that is used to generate a schema for `DocstringCodeLineWidth::Dynamic`. 
pub(super) fn dynamic(_: &mut SchemaGenerator) -> Schema { - Schema::Object(schemars::schema::SchemaObject { - const_value: Some("dynamic".to_string().into()), - ..Default::default() - }) + schemars::json_schema!({ "const": "dynamic" }) } // We use a manual schema for `fixed` even thought it isn't strictly necessary according to the @@ -422,19 +419,14 @@ mod schema { // `allOf`. There's no semantic difference between `allOf` and `oneOf` for single element lists. pub(super) fn fixed(generator: &mut SchemaGenerator) -> Schema { let schema = generator.subschema_for::(); - Schema::Object(schemars::schema::SchemaObject { - metadata: Some(Box::new(Metadata { - description: Some( - "Wrap docstring code examples at a fixed line width.".to_string(), - ), - ..Metadata::default() - })), - subschemas: Some(Box::new(SubschemaValidation { - one_of: Some(vec![schema]), - ..SubschemaValidation::default() - })), - ..Default::default() - }) + let mut schema_object = Schema::default(); + let map = schema_object.ensure_object(); + map.insert( + "description".to_string(), + Value::String("Wrap docstring code examples at a fixed line width.".to_string()), + ); + map.insert("oneOf".to_string(), Value::Array(vec![schema.into()])); + schema_object } } diff --git a/crates/ruff_python_semantic/src/imports.rs b/crates/ruff_python_semantic/src/imports.rs index 9f1188a9db12f4..95c54ced42bdb6 100644 --- a/crates/ruff_python_semantic/src/imports.rs +++ b/crates/ruff_python_semantic/src/imports.rs @@ -274,15 +274,11 @@ impl<'de> serde::de::Deserialize<'de> for NameImports { #[cfg(feature = "schemars")] impl schemars::JsonSchema for NameImports { - fn schema_name() -> String { - "NameImports".to_string() + fn schema_name() -> std::borrow::Cow<'static, str> { + std::borrow::Cow::Borrowed("NameImports") } - fn json_schema(_gen: &mut schemars::r#gen::SchemaGenerator) -> schemars::schema::Schema { - schemars::schema::SchemaObject { - instance_type: Some(schemars::schema::InstanceType::String.into()), - ..Default::default() - } - .into() + fn json_schema(_gen: &mut schemars::SchemaGenerator) -> schemars::Schema { + schemars::json_schema!({ "type": "string" }) } } diff --git a/crates/ruff_text_size/src/schemars_impls.rs b/crates/ruff_text_size/src/schemars_impls.rs index 4bc9ba2e01dc21..96533343540ffc 100644 --- a/crates/ruff_text_size/src/schemars_impls.rs +++ b/crates/ruff_text_size/src/schemars_impls.rs @@ -6,11 +6,12 @@ //! 
bindings to the Workspace API use crate::{TextRange, TextSize}; -use schemars::{JsonSchema, r#gen::SchemaGenerator, schema::Schema}; +use schemars::{JsonSchema, Schema, SchemaGenerator}; +use std::borrow::Cow; impl JsonSchema for TextSize { - fn schema_name() -> String { - String::from("TextSize") + fn schema_name() -> Cow<'static, str> { + Cow::Borrowed("TextSize") } fn json_schema(r#gen: &mut SchemaGenerator) -> Schema { @@ -21,8 +22,8 @@ impl JsonSchema for TextSize { } impl JsonSchema for TextRange { - fn schema_name() -> String { - String::from("TextRange") + fn schema_name() -> Cow<'static, str> { + Cow::Borrowed("TextRange") } fn json_schema(r#gen: &mut SchemaGenerator) -> Schema { diff --git a/crates/ruff_workspace/Cargo.toml b/crates/ruff_workspace/Cargo.toml index f3def3ee9b31d3..9ef114df94881a 100644 --- a/crates/ruff_workspace/Cargo.toml +++ b/crates/ruff_workspace/Cargo.toml @@ -42,6 +42,7 @@ regex = { workspace = true } rustc-hash = { workspace = true } schemars = { workspace = true, optional = true } serde = { workspace = true } +serde_json = { workspace = true, optional = true } shellexpand = { workspace = true } strum = { workspace = true } toml = { workspace = true } @@ -63,6 +64,7 @@ ignored = ["colored"] default = [] schemars = [ "dep:schemars", + "dep:serde_json", "ruff_formatter/schemars", "ruff_linter/schemars", "ruff_python_formatter/schemars", diff --git a/crates/ruff_workspace/src/options.rs b/crates/ruff_workspace/src/options.rs index 0260b5f9e51242..83f6b4866efc81 100644 --- a/crates/ruff_workspace/src/options.rs +++ b/crates/ruff_workspace/src/options.rs @@ -469,6 +469,7 @@ pub struct Options { deny_unknown_fields, rename_all = "kebab-case" )] +#[cfg_attr(feature = "schemars", schemars(!from))] pub struct LintOptions { #[serde(flatten)] pub common: LintCommonOptions, @@ -563,8 +564,8 @@ impl OptionsMetadata for DeprecatedTopLevelLintOptions { #[cfg(feature = "schemars")] impl schemars::JsonSchema for DeprecatedTopLevelLintOptions { - fn schema_name() -> String { - "DeprecatedTopLevelLintOptions".to_owned() + fn schema_name() -> std::borrow::Cow<'static, str> { + std::borrow::Cow::Borrowed("DeprecatedTopLevelLintOptions") } fn schema_id() -> std::borrow::Cow<'static, str> { std::borrow::Cow::Borrowed(concat!( @@ -573,28 +574,25 @@ impl schemars::JsonSchema for DeprecatedTopLevelLintOptions { "DeprecatedTopLevelLintOptions" )) } - fn json_schema(generator: &mut schemars::r#gen::SchemaGenerator) -> schemars::schema::Schema { - use schemars::schema::Schema; - - let common_schema = LintCommonOptions::json_schema(generator); - let mut schema_obj = common_schema.into_object(); - - if let Some(object) = schema_obj.object.as_mut() { - for property in object.properties.values_mut() { - if let Schema::Object(property_object) = property { - if let Some(metadata) = &mut property_object.metadata { - metadata.deprecated = true; - } else { - property_object.metadata = Some(Box::new(schemars::schema::Metadata { - deprecated: true, - ..schemars::schema::Metadata::default() - })); - } + fn json_schema(generator: &mut schemars::SchemaGenerator) -> schemars::Schema { + use serde_json::Value; + + let mut schema = LintCommonOptions::json_schema(generator); + if let Some(properties) = schema + .ensure_object() + .get_mut("properties") + .and_then(|value| value.as_object_mut()) + { + for property in properties.values_mut() { + if let Ok(property_schema) = <&mut schemars::Schema>::try_from(property) { + property_schema + .ensure_object() + .insert("deprecated".to_string(), 
Value::Bool(true)); } } } - Schema::Object(schema_obj) + schema } } @@ -603,6 +601,7 @@ impl schemars::JsonSchema for DeprecatedTopLevelLintOptions { // Don't add any new options to this struct. Add them to [`LintOptions`] directly to avoid exposing them in the // global settings. #[cfg_attr(feature = "schemars", derive(schemars::JsonSchema))] +#[cfg_attr(feature = "schemars", schemars(inline))] #[derive( Clone, Debug, PartialEq, Eq, Default, OptionsMetadata, CombineOptions, Serialize, Deserialize, )] @@ -3891,6 +3890,7 @@ pub struct AnalyzeOptions { /// Like [`LintCommonOptions`], but with any `#[serde(flatten)]` fields inlined. This leads to far, /// far better error messages when deserializing. +#[cfg_attr(feature = "schemars", derive(schemars::JsonSchema))] #[derive(Debug, Clone, Default, Deserialize)] #[serde(rename_all = "kebab-case", deny_unknown_fields)] pub struct LintOptionsWire { @@ -3938,8 +3938,8 @@ pub struct LintOptionsWire { pyflakes: Option, pylint: Option, pyupgrade: Option, - per_file_ignores: Option>>, - extend_per_file_ignores: Option>>, + per_file_ignores: Option>>, + extend_per_file_ignores: Option>>, exclude: Option>, pydoclint: Option, @@ -4005,6 +4005,11 @@ impl From for LintOptions { future_annotations, } = value; + let per_file_ignores = + per_file_ignores.map(|map| map.into_iter().collect::>()); + let extend_per_file_ignores = + extend_per_file_ignores.map(|map| map.into_iter().collect::>()); + LintOptions { #[expect(deprecated)] common: LintCommonOptions { diff --git a/crates/ty_project/Cargo.toml b/crates/ty_project/Cargo.toml index 7fe125e534fd1a..74356dac24c229 100644 --- a/crates/ty_project/Cargo.toml +++ b/crates/ty_project/Cargo.toml @@ -41,6 +41,7 @@ rustc-hash = { workspace = true } salsa = { workspace = true } schemars = { workspace = true, optional = true } serde = { workspace = true } +serde_json = { workspace = true, optional = true } thiserror = { workspace = true } toml = { workspace = true } tracing = { workspace = true } @@ -54,6 +55,7 @@ default = ["zstd"] deflate = ["ty_vendored/deflate"] schemars = [ "dep:schemars", + "dep:serde_json", "ruff_db/schemars", "ruff_python_ast/schemars", "ty_python_semantic/schemars", diff --git a/crates/ty_project/src/metadata/options.rs b/crates/ty_project/src/metadata/options.rs index 08a1582f93fceb..2245bfa16d4fbd 100644 --- a/crates/ty_project/src/metadata/options.rs +++ b/crates/ty_project/src/metadata/options.rs @@ -784,9 +784,7 @@ impl SrcOptions { Debug, Default, Clone, Eq, PartialEq, Combine, Serialize, Deserialize, Hash, get_size2::GetSize, )] #[serde(rename_all = "kebab-case", transparent)] -#[cfg_attr(feature = "schemars", derive(schemars::JsonSchema))] pub struct Rules { - #[cfg_attr(feature = "schemars", schemars(with = "schema::Rules"))] #[get_size(ignore)] // TODO: Add `GetSize` support for `OrderMap`. 
inner: OrderMap, RangedValue, BuildHasherDefault>, } @@ -801,6 +799,59 @@ impl FromIterator<(RangedValue, RangedValue)> for Rules { } } +#[cfg(feature = "schemars")] +impl schemars::JsonSchema for Rules { + fn schema_name() -> std::borrow::Cow<'static, str> { + std::borrow::Cow::Borrowed("Rules") + } + + fn json_schema(generator: &mut schemars::SchemaGenerator) -> schemars::Schema { + use serde_json::{Map, Value}; + + let registry = ty_python_semantic::default_lint_registry(); + let level_schema = generator.subschema_for::(); + + let properties: Map = registry + .lints() + .iter() + .map(|lint| { + let mut schema = schemars::Schema::default(); + let object = schema.ensure_object(); + object.insert( + "title".to_string(), + Value::String(lint.summary().to_string()), + ); + object.insert( + "description".to_string(), + Value::String(lint.documentation()), + ); + if lint.status.is_deprecated() { + object.insert("deprecated".to_string(), Value::Bool(true)); + } + object.insert( + "default".to_string(), + Value::String(lint.default_level.to_string()), + ); + object.insert( + "oneOf".to_string(), + Value::Array(vec![level_schema.clone().into()]), + ); + + (lint.name().to_string(), schema.into()) + }) + .collect(); + + let mut schema = schemars::json_schema!({ "type": "object" }); + let object = schema.ensure_object(); + object.insert("properties".to_string(), Value::Object(properties)); + // Allow unknown rules: ty will warn about them. It gives a better experience when using an older + // ty version because the schema will not deny rules that have been removed in newer versions. + object.insert("additionalProperties".to_string(), level_schema.into()); + + schema + } +} + impl Rules { /// Convert the rules to a `RuleSelection` with diagnostics. pub fn to_rule_selection( @@ -1533,68 +1584,6 @@ impl Display for ToSettingsError { impl std::error::Error for ToSettingsError {} -#[cfg(feature = "schemars")] -mod schema { - use schemars::JsonSchema; - use schemars::r#gen::SchemaGenerator; - use schemars::schema::{ - InstanceType, Metadata, ObjectValidation, Schema, SchemaObject, SubschemaValidation, - }; - use ty_python_semantic::lint::Level; - - pub(super) struct Rules; - - impl JsonSchema for Rules { - fn schema_name() -> String { - "Rules".to_string() - } - - fn json_schema(generator: &mut SchemaGenerator) -> Schema { - let registry = ty_python_semantic::default_lint_registry(); - - let level_schema = generator.subschema_for::(); - - let properties: schemars::Map = registry - .lints() - .iter() - .map(|lint| { - ( - lint.name().to_string(), - Schema::Object(SchemaObject { - metadata: Some(Box::new(Metadata { - title: Some(lint.summary().to_string()), - description: Some(lint.documentation()), - deprecated: lint.status.is_deprecated(), - default: Some(lint.default_level.to_string().into()), - ..Metadata::default() - })), - subschemas: Some(Box::new(SubschemaValidation { - one_of: Some(vec![level_schema.clone()]), - ..Default::default() - })), - ..Default::default() - }), - ) - }) - .collect(); - - Schema::Object(SchemaObject { - instance_type: Some(InstanceType::Object.into()), - object: Some(Box::new(ObjectValidation { - properties, - // Allow unknown rules: ty will warn about them. - // It gives a better experience when using an older ty version because - // the schema will not deny rules that have been removed in newer versions. 
- additional_properties: Some(Box::new(level_schema)), - ..ObjectValidation::default() - })), - - ..Default::default() - }) - } - } -} - #[derive(Error, Debug)] pub enum TyTomlError { #[error(transparent)] diff --git a/crates/ty_project/src/metadata/value.rs b/crates/ty_project/src/metadata/value.rs index 95a157a45180b9..f1f08d718a8e65 100644 --- a/crates/ty_project/src/metadata/value.rs +++ b/crates/ty_project/src/metadata/value.rs @@ -86,7 +86,6 @@ impl Drop for ValueSourceGuard { /// or if the values were loaded from different sources. #[derive(Clone, serde::Serialize, get_size2::GetSize)] #[serde(transparent)] -#[cfg_attr(feature = "schemars", derive(schemars::JsonSchema))] pub struct RangedValue { value: T, #[serde(skip)] @@ -100,6 +99,34 @@ pub struct RangedValue { range: Option, } +#[cfg(feature = "schemars")] +impl schemars::JsonSchema for RangedValue +where + T: schemars::JsonSchema, +{ + fn schema_name() -> std::borrow::Cow<'static, str> { + T::schema_name() + } + + fn schema_id() -> std::borrow::Cow<'static, str> { + T::schema_id() + } + + fn json_schema(generator: &mut schemars::SchemaGenerator) -> schemars::Schema { + T::json_schema(generator) + } + + fn _schemars_private_non_optional_json_schema( + generator: &mut schemars::SchemaGenerator, + ) -> schemars::Schema { + T::_schemars_private_non_optional_json_schema(generator) + } + + fn _schemars_private_is_option() -> bool { + T::_schemars_private_is_option() + } +} + impl RangedValue { pub fn new(value: T, source: ValueSource) -> Self { Self::with_range(value, source, TextRange::default()) diff --git a/crates/ty_python_semantic/Cargo.toml b/crates/ty_python_semantic/Cargo.toml index ef81237b5e6202..edeafa821ff666 100644 --- a/crates/ty_python_semantic/Cargo.toml +++ b/crates/ty_python_semantic/Cargo.toml @@ -43,6 +43,7 @@ rustc-hash = { workspace = true } hashbrown = { workspace = true } schemars = { workspace = true, optional = true } serde = { workspace = true, optional = true } +serde_json = { workspace = true, optional = true } smallvec = { workspace = true } static_assertions = { workspace = true } test-case = { workspace = true } @@ -68,6 +69,7 @@ quickcheck = { version = "1.0.3", default-features = false } quickcheck_macros = { version = "1.0.0" } [features] +schemars = ["dep:schemars", "dep:serde_json"] serde = ["ruff_db/serde", "dep:serde", "ruff_python_ast/serde"] testing = [] diff --git a/crates/ty_python_semantic/src/python_platform.rs b/crates/ty_python_semantic/src/python_platform.rs index 6f0c0fbab4d10f..33c60d6a48692a 100644 --- a/crates/ty_python_semantic/src/python_platform.rs +++ b/crates/ty_python_semantic/src/python_platform.rs @@ -58,77 +58,42 @@ impl Default for PythonPlatform { mod schema { use crate::PythonPlatform; use ruff_db::RustDoc; - use schemars::_serde_json::Value; - use schemars::JsonSchema; - use schemars::r#gen::SchemaGenerator; - use schemars::schema::{Metadata, Schema, SchemaObject, SubschemaValidation}; + use schemars::{JsonSchema, Schema, SchemaGenerator}; + use serde_json::Value; impl JsonSchema for PythonPlatform { - fn schema_name() -> String { - "PythonPlatform".to_string() + fn schema_name() -> std::borrow::Cow<'static, str> { + std::borrow::Cow::Borrowed("PythonPlatform") } fn json_schema(_gen: &mut SchemaGenerator) -> Schema { - Schema::Object(SchemaObject { - // Hard code some well known values, but allow any other string as well. 
- subschemas: Some(Box::new(SubschemaValidation { - any_of: Some(vec![ - Schema::Object(SchemaObject { - instance_type: Some(schemars::schema::InstanceType::String.into()), - ..SchemaObject::default() - }), - // Promote well-known values for better auto-completion. - // Using `const` over `enumValues` as recommended [here](https://github.com/SchemaStore/schemastore/blob/master/CONTRIBUTING.md#documenting-enums). - Schema::Object(SchemaObject { - const_value: Some(Value::String("all".to_string())), - metadata: Some(Box::new(Metadata { - description: Some( - "Do not make any assumptions about the target platform." - .to_string(), - ), - ..Metadata::default() - })), + fn constant(value: &str, description: &str) -> Value { + let mut schema = schemars::json_schema!({ "const": value }); + schema.ensure_object().insert( + "description".to_string(), + Value::String(description.to_string()), + ); + schema.into() + } - ..SchemaObject::default() - }), - Schema::Object(SchemaObject { - const_value: Some(Value::String("darwin".to_string())), - metadata: Some(Box::new(Metadata { - description: Some("Darwin".to_string()), - ..Metadata::default() - })), + let mut any_of = vec![schemars::json_schema!({ "type": "string" }).into()]; + any_of.push(constant( + "all", + "Do not make any assumptions about the target platform.", + )); + any_of.push(constant("darwin", "Darwin")); + any_of.push(constant("linux", "Linux")); + any_of.push(constant("win32", "Windows")); - ..SchemaObject::default() - }), - Schema::Object(SchemaObject { - const_value: Some(Value::String("linux".to_string())), - metadata: Some(Box::new(Metadata { - description: Some("Linux".to_string()), - ..Metadata::default() - })), + let mut schema = Schema::default(); + let object = schema.ensure_object(); + object.insert("anyOf".to_string(), Value::Array(any_of)); + object.insert( + "description".to_string(), + Value::String(::rust_doc().to_string()), + ); - ..SchemaObject::default() - }), - Schema::Object(SchemaObject { - const_value: Some(Value::String("win32".to_string())), - metadata: Some(Box::new(Metadata { - description: Some("Windows".to_string()), - ..Metadata::default() - })), - - ..SchemaObject::default() - }), - ]), - - ..SubschemaValidation::default() - })), - metadata: Some(Box::new(Metadata { - description: Some(::rust_doc().to_string()), - ..Metadata::default() - })), - - ..SchemaObject::default() - }) + schema } } } diff --git a/ruff.schema.json b/ruff.schema.json index b44f308d65055a..74783d4b324cb1 100644 --- a/ruff.schema.json +++ b/ruff.schema.json @@ -1,15 +1,15 @@ { - "$schema": "http://json-schema.org/draft-07/schema#", + "$schema": "https://json-schema.org/draft/2020-12/schema", "title": "Options", "type": "object", "properties": { "allowed-confusables": { - "description": "A list of allowed \"confusable\" Unicode characters to ignore when enforcing `RUF001`, `RUF002`, and `RUF003`.", - "deprecated": true, + "description": "A list of allowed \"confusable\" Unicode characters to ignore when\nenforcing `RUF001`, `RUF002`, and `RUF003`.", "type": [ "array", "null" ], + "deprecated": true, "items": { "type": "string", "maxLength": 1, @@ -20,7 +20,7 @@ "description": "Options to configure import map generation.", "anyOf": [ { - "$ref": "#/definitions/AnalyzeOptions" + "$ref": "#/$defs/AnalyzeOptions" }, { "type": "null" @@ -28,7 +28,7 @@ ] }, "builtins": { - "description": "A list of builtins to treat as defined references, in addition to the system builtins.", + "description": "A list of builtins to treat as defined references, 
in addition to the\nsystem builtins.", "type": [ "array", "null" @@ -38,22 +38,22 @@ } }, "cache-dir": { - "description": "A path to the cache directory.\n\nBy default, Ruff stores cache results in a `.ruff_cache` directory in the current project root.\n\nHowever, Ruff will also respect the `RUFF_CACHE_DIR` environment variable, which takes precedence over that default.\n\nThis setting will override even the `RUFF_CACHE_DIR` environment variable, if set.", + "description": "A path to the cache directory.\n\nBy default, Ruff stores cache results in a `.ruff_cache` directory in\nthe current project root.\n\nHowever, Ruff will also respect the `RUFF_CACHE_DIR` environment\nvariable, which takes precedence over that default.\n\nThis setting will override even the `RUFF_CACHE_DIR` environment\nvariable, if set.", "type": [ "string", "null" ] }, "dummy-variable-rgx": { - "description": "A regular expression used to identify \"dummy\" variables, or those which should be ignored when enforcing (e.g.) unused-variable rules. The default expression matches `_`, `__`, and `_var`, but not `_var_`.", - "deprecated": true, + "description": "A regular expression used to identify \"dummy\" variables, or those which\nshould be ignored when enforcing (e.g.) unused-variable rules. The\ndefault expression matches `_`, `__`, and `_var`, but not `_var_`.", "type": [ "string", "null" - ] + ], + "deprecated": true }, "exclude": { - "description": "A list of file patterns to exclude from formatting and linting.\n\nExclusions are based on globs, and can be either:\n\n- Single-path patterns, like `.mypy_cache` (to exclude any directory named `.mypy_cache` in the tree), `foo.py` (to exclude any file named `foo.py`), or `foo_*.py` (to exclude any file matching `foo_*.py` ). - Relative patterns, like `directory/foo.py` (to exclude that specific file) or `directory/*.py` (to exclude any Python files in `directory`). Note that these paths are relative to the project root (e.g., the directory containing your `pyproject.toml`).\n\nFor more information on the glob syntax, refer to the [`globset` documentation](https://docs.rs/globset/latest/globset/#syntax).\n\nNote that you'll typically want to use [`extend-exclude`](#extend-exclude) to modify the excluded paths.", + "description": "A list of file patterns to exclude from formatting and linting.\n\nExclusions are based on globs, and can be either:\n\n- Single-path patterns, like `.mypy_cache` (to exclude any directory\n named `.mypy_cache` in the tree), `foo.py` (to exclude any file named\n `foo.py`), or `foo_*.py` (to exclude any file matching `foo_*.py` ).\n- Relative patterns, like `directory/foo.py` (to exclude that specific\n file) or `directory/*.py` (to exclude any Python files in\n `directory`). Note that these paths are relative to the project root\n (e.g., the directory containing your `pyproject.toml`).\n\nFor more information on the glob syntax, refer to the [`globset` documentation](https://docs.rs/globset/latest/globset/#syntax).\n\nNote that you'll typically want to use\n[`extend-exclude`](#extend-exclude) to modify the excluded paths.", "type": [ "array", "null" @@ -63,22 +63,22 @@ } }, "explicit-preview-rules": { - "description": "Whether to require exact codes to select preview rules. When enabled, preview rules will not be selected by prefixes — the full code of each preview rule will be required to enable the rule.", - "deprecated": true, + "description": "Whether to require exact codes to select preview rules. 
When enabled,\npreview rules will not be selected by prefixes — the full code of each\npreview rule will be required to enable the rule.", "type": [ "boolean", "null" - ] + ], + "deprecated": true }, "extend": { - "description": "A path to a local `pyproject.toml` file to merge into this configuration. User home directory and environment variables will be expanded.\n\nTo resolve the current `pyproject.toml` file, Ruff will first resolve this base configuration file, then merge in any properties defined in the current configuration file.", + "description": "A path to a local `pyproject.toml` file to merge into this\nconfiguration. User home directory and environment variables will be\nexpanded.\n\nTo resolve the current `pyproject.toml` file, Ruff will first resolve\nthis base configuration file, then merge in any properties defined\nin the current configuration file.", "type": [ "string", "null" ] }, "extend-exclude": { - "description": "A list of file patterns to omit from formatting and linting, in addition to those specified by [`exclude`](#exclude).\n\nExclusions are based on globs, and can be either:\n\n- Single-path patterns, like `.mypy_cache` (to exclude any directory named `.mypy_cache` in the tree), `foo.py` (to exclude any file named `foo.py`), or `foo_*.py` (to exclude any file matching `foo_*.py` ). - Relative patterns, like `directory/foo.py` (to exclude that specific file) or `directory/*.py` (to exclude any Python files in `directory`). Note that these paths are relative to the project root (e.g., the directory containing your `pyproject.toml`).\n\nFor more information on the glob syntax, refer to the [`globset` documentation](https://docs.rs/globset/latest/globset/#syntax).", + "description": "A list of file patterns to omit from formatting and linting, in addition to those\nspecified by [`exclude`](#exclude).\n\nExclusions are based on globs, and can be either:\n\n- Single-path patterns, like `.mypy_cache` (to exclude any directory\n named `.mypy_cache` in the tree), `foo.py` (to exclude any file named\n `foo.py`), or `foo_*.py` (to exclude any file matching `foo_*.py` ).\n- Relative patterns, like `directory/foo.py` (to exclude that specific\n file) or `directory/*.py` (to exclude any Python files in\n `directory`). 
Note that these paths are relative to the project root\n (e.g., the directory containing your `pyproject.toml`).\n\nFor more information on the glob syntax, refer to the [`globset` documentation](https://docs.rs/globset/latest/globset/#syntax).", "type": [ "array", "null" @@ -88,29 +88,29 @@ } }, "extend-fixable": { - "description": "A list of rule codes or prefixes to consider fixable, in addition to those specified by [`fixable`](#lint_fixable).", - "deprecated": true, + "description": "A list of rule codes or prefixes to consider fixable, in addition to those\nspecified by [`fixable`](#lint_fixable).", "type": [ "array", "null" ], + "deprecated": true, "items": { - "$ref": "#/definitions/RuleSelector" + "$ref": "#/$defs/RuleSelector" } }, "extend-ignore": { - "description": "A list of rule codes or prefixes to ignore, in addition to those specified by `ignore`.", - "deprecated": true, + "description": "A list of rule codes or prefixes to ignore, in addition to those\nspecified by `ignore`.", "type": [ "array", "null" ], + "deprecated": true, "items": { - "$ref": "#/definitions/RuleSelector" + "$ref": "#/$defs/RuleSelector" } }, "extend-include": { - "description": "A list of file patterns to include when linting, in addition to those specified by [`include`](#include).\n\nInclusion are based on globs, and should be single-path patterns, like `*.pyw`, to include any file with the `.pyw` extension.\n\nFor more information on the glob syntax, refer to the [`globset` documentation](https://docs.rs/globset/latest/globset/#syntax).", + "description": "A list of file patterns to include when linting, in addition to those\nspecified by [`include`](#include).\n\nInclusion are based on globs, and should be single-path patterns, like\n`*.pyw`, to include any file with the `.pyw` extension.\n\nFor more information on the glob syntax, refer to the [`globset` documentation](https://docs.rs/globset/latest/globset/#syntax).", "type": [ "array", "null" @@ -120,8 +120,7 @@ } }, "extend-per-file-ignores": { - "description": "A list of mappings from file pattern to rule codes or prefixes to exclude, in addition to any rules excluded by [`per-file-ignores`](#lint_per-file-ignores).", - "deprecated": true, + "description": "A list of mappings from file pattern to rule codes or prefixes to\nexclude, in addition to any rules excluded by [`per-file-ignores`](#lint_per-file-ignores).", "type": [ "object", "null" @@ -129,67 +128,68 @@ "additionalProperties": { "type": "array", "items": { - "$ref": "#/definitions/RuleSelector" + "$ref": "#/$defs/RuleSelector" } - } + }, + "deprecated": true }, "extend-safe-fixes": { - "description": "A list of rule codes or prefixes for which unsafe fixes should be considered safe.", - "deprecated": true, + "description": "A list of rule codes or prefixes for which unsafe fixes should be considered\nsafe.", "type": [ "array", "null" ], + "deprecated": true, "items": { - "$ref": "#/definitions/RuleSelector" + "$ref": "#/$defs/RuleSelector" } }, "extend-select": { - "description": "A list of rule codes or prefixes to enable, in addition to those specified by [`select`](#lint_select).", - "deprecated": true, + "description": "A list of rule codes or prefixes to enable, in addition to those\nspecified by [`select`](#lint_select).", "type": [ "array", "null" ], + "deprecated": true, "items": { - "$ref": "#/definitions/RuleSelector" + "$ref": "#/$defs/RuleSelector" } }, "extend-unfixable": { - "description": "A list of rule codes or prefixes to consider non-auto-fixable, in addition to 
those specified by [`unfixable`](#lint_unfixable).", - "deprecated": true, + "description": "A list of rule codes or prefixes to consider non-auto-fixable, in addition to those\nspecified by [`unfixable`](#lint_unfixable).", "type": [ "array", "null" ], + "deprecated": true, "items": { - "$ref": "#/definitions/RuleSelector" + "$ref": "#/$defs/RuleSelector" } }, "extend-unsafe-fixes": { - "description": "A list of rule codes or prefixes for which safe fixes should be considered unsafe.", - "deprecated": true, + "description": "A list of rule codes or prefixes for which safe fixes should be considered\nunsafe.", "type": [ "array", "null" ], + "deprecated": true, "items": { - "$ref": "#/definitions/RuleSelector" + "$ref": "#/$defs/RuleSelector" } }, "external": { - "description": "A list of rule codes or prefixes that are unsupported by Ruff, but should be preserved when (e.g.) validating `# noqa` directives. Useful for retaining `# noqa` directives that cover plugins not yet implemented by Ruff.", - "deprecated": true, + "description": "A list of rule codes or prefixes that are unsupported by Ruff, but should be\npreserved when (e.g.) validating `# noqa` directives. Useful for\nretaining `# noqa` directives that cover plugins not yet implemented\nby Ruff.", "type": [ "array", "null" ], + "deprecated": true, "items": { "type": "string" } }, "fix": { - "description": "Enable fix behavior by-default when running `ruff` (overridden by the `--fix` and `--no-fix` command-line flags). Only includes automatic fixes unless `--unsafe-fixes` is provided.", + "description": "Enable fix behavior by-default when running `ruff` (overridden\nby the `--fix` and `--no-fix` command-line flags).\nOnly includes automatic fixes unless `--unsafe-fixes` is provided.", "type": [ "boolean", "null" @@ -203,222 +203,222 @@ ] }, "fixable": { - "description": "A list of rule codes or prefixes to consider fixable. By default, all rules are considered fixable.", - "deprecated": true, + "description": "A list of rule codes or prefixes to consider fixable. 
By default,\nall rules are considered fixable.", "type": [ "array", "null" ], + "deprecated": true, "items": { - "$ref": "#/definitions/RuleSelector" + "$ref": "#/$defs/RuleSelector" } }, "flake8-annotations": { "description": "Options for the `flake8-annotations` plugin.", - "deprecated": true, "anyOf": [ { - "$ref": "#/definitions/Flake8AnnotationsOptions" + "$ref": "#/$defs/Flake8AnnotationsOptions" }, { "type": "null" } - ] + ], + "deprecated": true }, "flake8-bandit": { "description": "Options for the `flake8-bandit` plugin.", - "deprecated": true, "anyOf": [ { - "$ref": "#/definitions/Flake8BanditOptions" + "$ref": "#/$defs/Flake8BanditOptions" }, { "type": "null" } - ] + ], + "deprecated": true }, "flake8-boolean-trap": { "description": "Options for the `flake8-boolean-trap` plugin.", - "deprecated": true, "anyOf": [ { - "$ref": "#/definitions/Flake8BooleanTrapOptions" + "$ref": "#/$defs/Flake8BooleanTrapOptions" }, { "type": "null" } - ] + ], + "deprecated": true }, "flake8-bugbear": { "description": "Options for the `flake8-bugbear` plugin.", - "deprecated": true, "anyOf": [ { - "$ref": "#/definitions/Flake8BugbearOptions" + "$ref": "#/$defs/Flake8BugbearOptions" }, { "type": "null" } - ] + ], + "deprecated": true }, "flake8-builtins": { "description": "Options for the `flake8-builtins` plugin.", - "deprecated": true, "anyOf": [ { - "$ref": "#/definitions/Flake8BuiltinsOptions" + "$ref": "#/$defs/Flake8BuiltinsOptions" }, { "type": "null" } - ] + ], + "deprecated": true }, "flake8-comprehensions": { "description": "Options for the `flake8-comprehensions` plugin.", - "deprecated": true, "anyOf": [ { - "$ref": "#/definitions/Flake8ComprehensionsOptions" + "$ref": "#/$defs/Flake8ComprehensionsOptions" }, { "type": "null" } - ] + ], + "deprecated": true }, "flake8-copyright": { "description": "Options for the `flake8-copyright` plugin.", - "deprecated": true, "anyOf": [ { - "$ref": "#/definitions/Flake8CopyrightOptions" + "$ref": "#/$defs/Flake8CopyrightOptions" }, { "type": "null" } - ] + ], + "deprecated": true }, "flake8-errmsg": { "description": "Options for the `flake8-errmsg` plugin.", - "deprecated": true, "anyOf": [ { - "$ref": "#/definitions/Flake8ErrMsgOptions" + "$ref": "#/$defs/Flake8ErrMsgOptions" }, { "type": "null" } - ] + ], + "deprecated": true }, "flake8-gettext": { "description": "Options for the `flake8-gettext` plugin.", - "deprecated": true, "anyOf": [ { - "$ref": "#/definitions/Flake8GetTextOptions" + "$ref": "#/$defs/Flake8GetTextOptions" }, { "type": "null" } - ] + ], + "deprecated": true }, "flake8-implicit-str-concat": { "description": "Options for the `flake8-implicit-str-concat` plugin.", - "deprecated": true, "anyOf": [ { - "$ref": "#/definitions/Flake8ImplicitStrConcatOptions" + "$ref": "#/$defs/Flake8ImplicitStrConcatOptions" }, { "type": "null" } - ] + ], + "deprecated": true }, "flake8-import-conventions": { "description": "Options for the `flake8-import-conventions` plugin.", - "deprecated": true, "anyOf": [ { - "$ref": "#/definitions/Flake8ImportConventionsOptions" + "$ref": "#/$defs/Flake8ImportConventionsOptions" }, { "type": "null" } - ] + ], + "deprecated": true }, "flake8-pytest-style": { "description": "Options for the `flake8-pytest-style` plugin.", - "deprecated": true, "anyOf": [ { - "$ref": "#/definitions/Flake8PytestStyleOptions" + "$ref": "#/$defs/Flake8PytestStyleOptions" }, { "type": "null" } - ] + ], + "deprecated": true }, "flake8-quotes": { "description": "Options for the `flake8-quotes` plugin.", - "deprecated": true, "anyOf": [ { 
- "$ref": "#/definitions/Flake8QuotesOptions" + "$ref": "#/$defs/Flake8QuotesOptions" }, { "type": "null" } - ] + ], + "deprecated": true }, "flake8-self": { "description": "Options for the `flake8_self` plugin.", - "deprecated": true, "anyOf": [ { - "$ref": "#/definitions/Flake8SelfOptions" + "$ref": "#/$defs/Flake8SelfOptions" }, { "type": "null" } - ] + ], + "deprecated": true }, "flake8-tidy-imports": { "description": "Options for the `flake8-tidy-imports` plugin.", - "deprecated": true, "anyOf": [ { - "$ref": "#/definitions/Flake8TidyImportsOptions" + "$ref": "#/$defs/Flake8TidyImportsOptions" }, { "type": "null" } - ] + ], + "deprecated": true }, "flake8-type-checking": { "description": "Options for the `flake8-type-checking` plugin.", - "deprecated": true, "anyOf": [ { - "$ref": "#/definitions/Flake8TypeCheckingOptions" + "$ref": "#/$defs/Flake8TypeCheckingOptions" }, { "type": "null" } - ] + ], + "deprecated": true }, "flake8-unused-arguments": { "description": "Options for the `flake8-unused-arguments` plugin.", - "deprecated": true, "anyOf": [ { - "$ref": "#/definitions/Flake8UnusedArgumentsOptions" + "$ref": "#/$defs/Flake8UnusedArgumentsOptions" }, { "type": "null" } - ] + ], + "deprecated": true }, "force-exclude": { - "description": "Whether to enforce [`exclude`](#exclude) and [`extend-exclude`](#extend-exclude) patterns, even for paths that are passed to Ruff explicitly. Typically, Ruff will lint any paths passed in directly, even if they would typically be excluded. Setting `force-exclude = true` will cause Ruff to respect these exclusions unequivocally.\n\nThis is useful for [`pre-commit`](https://pre-commit.com/), which explicitly passes all changed files to the [`ruff-pre-commit`](https://github.com/astral-sh/ruff-pre-commit) plugin, regardless of whether they're marked as excluded by Ruff's own settings.", + "description": "Whether to enforce [`exclude`](#exclude) and [`extend-exclude`](#extend-exclude) patterns,\neven for paths that are passed to Ruff explicitly. Typically, Ruff will lint\nany paths passed in directly, even if they would typically be\nexcluded. Setting `force-exclude = true` will cause Ruff to\nrespect these exclusions unequivocally.\n\nThis is useful for [`pre-commit`](https://pre-commit.com/), which explicitly passes all\nchanged files to the [`ruff-pre-commit`](https://github.com/astral-sh/ruff-pre-commit)\nplugin, regardless of whether they're marked as excluded by Ruff's own\nsettings.", "type": [ "boolean", "null" @@ -428,7 +428,7 @@ "description": "Options to configure code formatting.", "anyOf": [ { - "$ref": "#/definitions/FormatOptions" + "$ref": "#/$defs/FormatOptions" }, { "type": "null" @@ -436,26 +436,26 @@ ] }, "ignore": { - "description": "A list of rule codes or prefixes to ignore. Prefixes can specify exact rules (like `F841`), entire categories (like `F`), or anything in between.\n\nWhen breaking ties between enabled and disabled rules (via `select` and `ignore`, respectively), more specific prefixes override less specific prefixes. `ignore` takes precedence over `select` if the same prefix appears in both.", - "deprecated": true, + "description": "A list of rule codes or prefixes to ignore. Prefixes can specify exact\nrules (like `F841`), entire categories (like `F`), or anything in\nbetween.\n\nWhen breaking ties between enabled and disabled rules (via `select` and\n`ignore`, respectively), more specific prefixes override less\nspecific prefixes. 
`ignore` takes precedence over `select` if the same\nprefix appears in both.", "type": [ "array", "null" ], + "deprecated": true, "items": { - "$ref": "#/definitions/RuleSelector" + "$ref": "#/$defs/RuleSelector" } }, "ignore-init-module-imports": { - "description": "Avoid automatically removing unused imports in `__init__.py` files. Such imports will still be flagged, but with a dedicated message suggesting that the import is either added to the module's `__all__` symbol, or re-exported with a redundant alias (e.g., `import os as os`).\n\nThis option is enabled by default, but you can opt-in to removal of imports via an unsafe fix.", - "deprecated": true, + "description": "Avoid automatically removing unused imports in `__init__.py` files. Such\nimports will still be flagged, but with a dedicated message suggesting\nthat the import is either added to the module's `__all__` symbol, or\nre-exported with a redundant alias (e.g., `import os as os`).\n\nThis option is enabled by default, but you can opt-in to removal of imports\nvia an unsafe fix.", "type": [ "boolean", "null" - ] + ], + "deprecated": true }, "include": { - "description": "A list of file patterns to include when linting.\n\nInclusion are based on globs, and should be single-path patterns, like `*.pyw`, to include any file with the `.pyw` extension. `pyproject.toml` is included here not for configuration but because we lint whether e.g. the `[project]` matches the schema.\n\nNotebook files (`.ipynb` extension) are included by default on Ruff 0.6.0+.\n\nFor more information on the glob syntax, refer to the [`globset` documentation](https://docs.rs/globset/latest/globset/#syntax).", + "description": "A list of file patterns to include when linting.\n\nInclusion are based on globs, and should be single-path patterns, like\n`*.pyw`, to include any file with the `.pyw` extension. `pyproject.toml` is\nincluded here not for configuration but because we lint whether e.g. 
the\n`[project]` matches the schema.\n\nNotebook files (`.ipynb` extension) are included by default on Ruff 0.6.0+.\n\nFor more information on the glob syntax, refer to the [`globset` documentation](https://docs.rs/globset/latest/globset/#syntax).", "type": [ "array", "null" @@ -465,10 +465,10 @@ } }, "indent-width": { - "description": "The number of spaces per indentation level (tab).\n\nUsed by the formatter and when enforcing long-line violations (like `E501`) to determine the visual width of a tab.\n\nThis option changes the number of spaces the formatter inserts when using soft-tabs (`indent-style = space`).\n\nPEP 8 recommends using 4 spaces per [indentation level](https://peps.python.org/pep-0008/#indentation).", + "description": "The number of spaces per indentation level (tab).\n\nUsed by the formatter and when enforcing long-line violations (like `E501`) to determine the visual\nwidth of a tab.\n\nThis option changes the number of spaces the formatter inserts when\nusing soft-tabs (`indent-style = space`).\n\nPEP 8 recommends using 4 spaces per [indentation level](https://peps.python.org/pep-0008/#indentation).", "anyOf": [ { - "$ref": "#/definitions/IndentWidth" + "$ref": "#/$defs/IndentWidth" }, { "type": "null" @@ -477,21 +477,21 @@ }, "isort": { "description": "Options for the `isort` plugin.", - "deprecated": true, "anyOf": [ { - "$ref": "#/definitions/IsortOptions" + "$ref": "#/$defs/IsortOptions" }, { "type": "null" } - ] + ], + "deprecated": true }, "line-length": { - "description": "The line length to use when enforcing long-lines violations (like `E501`) and at which `isort` and the formatter prefers to wrap lines.\n\nThe length is determined by the number of characters per line, except for lines containing East Asian characters or emojis. 
For these lines, the [unicode width](https://unicode.org/reports/tr11/) of each character is added up to determine the length.\n\nThe value must be greater than `0` and less than or equal to `320`.\n\nNote: While the formatter will attempt to format lines such that they remain within the `line-length`, it isn't a hard upper bound, and formatted lines may exceed the `line-length`.\n\nSee [`pycodestyle.max-line-length`](#lint_pycodestyle_max-line-length) to configure different lengths for `E501` and the formatter.", + "description": "The line length to use when enforcing long-lines violations (like `E501`)\nand at which `isort` and the formatter prefers to wrap lines.\n\nThe length is determined by the number of characters per line, except for lines containing East Asian characters or emojis.\nFor these lines, the [unicode width](https://unicode.org/reports/tr11/) of each character is added up to determine the length.\n\nThe value must be greater than `0` and less than or equal to `320`.\n\nNote: While the formatter will attempt to format lines such that they remain\nwithin the `line-length`, it isn't a hard upper bound, and formatted lines may\nexceed the `line-length`.\n\nSee [`pycodestyle.max-line-length`](#lint_pycodestyle_max-line-length) to configure different lengths for `E501` and the formatter.", "anyOf": [ { - "$ref": "#/definitions/LineLength" + "$ref": "#/$defs/LineLength" }, { "type": "null" @@ -501,7 +501,7 @@ "lint": { "anyOf": [ { - "$ref": "#/definitions/LintOptions" + "$ref": "#/$defs/LintOptions" }, { "type": "null" @@ -509,30 +509,30 @@ ] }, "logger-objects": { - "description": "A list of objects that should be treated equivalently to a `logging.Logger` object.\n\nThis is useful for ensuring proper diagnostics (e.g., to identify `logging` deprecations and other best-practices) for projects that re-export a `logging.Logger` object from a common module.\n\nFor example, if you have a module `logging_setup.py` with the following contents: ```python import logging\n\nlogger = logging.getLogger(__name__) ```\n\nAdding `\"logging_setup.logger\"` to `logger-objects` will ensure that `logging_setup.logger` is treated as a `logging.Logger` object when imported from other modules (e.g., `from logging_setup import logger`).", - "deprecated": true, + "description": "A list of objects that should be treated equivalently to a\n`logging.Logger` object.\n\nThis is useful for ensuring proper diagnostics (e.g., to identify\n`logging` deprecations and other best-practices) for projects that\nre-export a `logging.Logger` object from a common module.\n\nFor example, if you have a module `logging_setup.py` with the following\ncontents:\n```python\nimport logging\n\nlogger = logging.getLogger(__name__)\n```\n\nAdding `\"logging_setup.logger\"` to `logger-objects` will ensure that\n`logging_setup.logger` is treated as a `logging.Logger` object when\nimported from other modules (e.g., `from logging_setup import logger`).", "type": [ "array", "null" ], + "deprecated": true, "items": { "type": "string" } }, "mccabe": { "description": "Options for the `mccabe` plugin.", - "deprecated": true, "anyOf": [ { - "$ref": "#/definitions/McCabeOptions" + "$ref": "#/$defs/McCabeOptions" }, { "type": "null" } - ] + ], + "deprecated": true }, "namespace-packages": { - "description": "Mark the specified directories as namespace packages. 
For the purpose of module resolution, Ruff will treat those directories and all their subdirectories as if they contained an `__init__.py` file.", + "description": "Mark the specified directories as namespace packages. For the purpose of\nmodule resolution, Ruff will treat those directories and all their subdirectories\nas if they contained an `__init__.py` file.", "type": [ "array", "null" @@ -542,10 +542,10 @@ } }, "output-format": { - "description": "The style in which violation messages should be formatted: `\"full\"` (default) (shows source), `\"concise\"`, `\"grouped\"` (group messages by file), `\"json\"` (machine-readable), `\"junit\"` (machine-readable XML), `\"github\"` (GitHub Actions annotations), `\"gitlab\"` (GitLab CI code quality report), `\"pylint\"` (Pylint text format) or `\"azure\"` (Azure Pipeline logging commands).", + "description": "The style in which violation messages should be formatted: `\"full\"` (default)\n(shows source), `\"concise\"`, `\"grouped\"` (group messages by file), `\"json\"`\n(machine-readable), `\"junit\"` (machine-readable XML), `\"github\"` (GitHub\nActions annotations), `\"gitlab\"` (GitLab CI code quality report),\n`\"pylint\"` (Pylint text format) or `\"azure\"` (Azure Pipeline logging commands).", "anyOf": [ { - "$ref": "#/definitions/OutputFormat" + "$ref": "#/$defs/OutputFormat" }, { "type": "null" @@ -554,19 +554,18 @@ }, "pep8-naming": { "description": "Options for the `pep8-naming` plugin.", - "deprecated": true, "anyOf": [ { - "$ref": "#/definitions/Pep8NamingOptions" + "$ref": "#/$defs/Pep8NamingOptions" }, { "type": "null" } - ] + ], + "deprecated": true }, "per-file-ignores": { - "description": "A list of mappings from file pattern to rule codes or prefixes to exclude, when considering any matching files. An initial '!' negates the file pattern.", - "deprecated": true, + "description": "A list of mappings from file pattern to rule codes or prefixes to\nexclude, when considering any matching files. An initial '!' negates\nthe file pattern.", "type": [ "object", "null" @@ -574,22 +573,23 @@ "additionalProperties": { "type": "array", "items": { - "$ref": "#/definitions/RuleSelector" + "$ref": "#/$defs/RuleSelector" } - } + }, + "deprecated": true }, "per-file-target-version": { - "description": "A list of mappings from glob-style file pattern to Python version to use when checking the corresponding file(s).\n\nThis may be useful for overriding the global Python version settings in `target-version` or `requires-python` for a subset of files. For example, if you have a project with a minimum supported Python version of 3.9 but a subdirectory of developer scripts that want to use a newer feature like the `match` statement from Python 3.10, you can use `per-file-target-version` to specify `\"developer_scripts/*.py\" = \"py310\"`.\n\nThis setting is used by the linter to enforce any enabled version-specific lint rules, as well as by the formatter for any version-specific formatting options, such as parenthesizing context managers on Python 3.10+.", + "description": "A list of mappings from glob-style file pattern to Python version to use when checking the\ncorresponding file(s).\n\nThis may be useful for overriding the global Python version settings in `target-version` or\n`requires-python` for a subset of files. 
For example, if you have a project with a minimum\nsupported Python version of 3.9 but a subdirectory of developer scripts that want to use a\nnewer feature like the `match` statement from Python 3.10, you can use\n`per-file-target-version` to specify `\"developer_scripts/*.py\" = \"py310\"`.\n\nThis setting is used by the linter to enforce any enabled version-specific lint rules, as\nwell as by the formatter for any version-specific formatting options, such as parenthesizing\ncontext managers on Python 3.10+.", "type": [ "object", "null" ], "additionalProperties": { - "$ref": "#/definitions/PythonVersion" + "$ref": "#/$defs/PythonVersion" } }, "preview": { - "description": "Whether to enable preview mode. When preview mode is enabled, Ruff will use unstable rules, fixes, and formatting.", + "description": "Whether to enable preview mode. When preview mode is enabled, Ruff will\nuse unstable rules, fixes, and formatting.", "type": [ "boolean", "null" @@ -597,69 +597,69 @@ }, "pycodestyle": { "description": "Options for the `pycodestyle` plugin.", - "deprecated": true, "anyOf": [ { - "$ref": "#/definitions/PycodestyleOptions" + "$ref": "#/$defs/PycodestyleOptions" }, { "type": "null" } - ] + ], + "deprecated": true }, "pydocstyle": { "description": "Options for the `pydocstyle` plugin.", - "deprecated": true, "anyOf": [ { - "$ref": "#/definitions/PydocstyleOptions" + "$ref": "#/$defs/PydocstyleOptions" }, { "type": "null" } - ] + ], + "deprecated": true }, "pyflakes": { "description": "Options for the `pyflakes` plugin.", - "deprecated": true, "anyOf": [ { - "$ref": "#/definitions/PyflakesOptions" + "$ref": "#/$defs/PyflakesOptions" }, { "type": "null" } - ] + ], + "deprecated": true }, "pylint": { "description": "Options for the `pylint` plugin.", - "deprecated": true, "anyOf": [ { - "$ref": "#/definitions/PylintOptions" + "$ref": "#/$defs/PylintOptions" }, { "type": "null" } - ] + ], + "deprecated": true }, "pyupgrade": { "description": "Options for the `pyupgrade` plugin.", - "deprecated": true, "anyOf": [ { - "$ref": "#/definitions/PyUpgradeOptions" + "$ref": "#/$defs/PyUpgradeOptions" }, { "type": "null" } - ] + ], + "deprecated": true }, "required-version": { - "description": "Enforce a requirement on the version of Ruff, to enforce at runtime. If the version of Ruff does not meet the requirement, Ruff will exit with an error.\n\nUseful for unifying results across many environments, e.g., with a `pyproject.toml` file.\n\nAccepts a [PEP 440](https://peps.python.org/pep-0440/) specifier, like `==0.3.1` or `>=0.3.1`.", + "description": "Enforce a requirement on the version of Ruff, to enforce at runtime.\nIf the version of Ruff does not meet the requirement, Ruff will exit\nwith an error.\n\nUseful for unifying results across many environments, e.g., with a\n`pyproject.toml` file.\n\nAccepts a [PEP 440](https://peps.python.org/pep-0440/) specifier, like `==0.3.1` or `>=0.3.1`.", "anyOf": [ { - "$ref": "#/definitions/RequiredVersion" + "$ref": "#/$defs/RequiredVersion" }, { "type": "null" @@ -667,32 +667,32 @@ ] }, "respect-gitignore": { - "description": "Whether to automatically exclude files that are ignored by `.ignore`, `.gitignore`, `.git/info/exclude`, and global `gitignore` files. Enabled by default.", + "description": "Whether to automatically exclude files that are ignored by `.ignore`,\n`.gitignore`, `.git/info/exclude`, and global `gitignore` files.\nEnabled by default.", "type": [ "boolean", "null" ] }, "select": { - "description": "A list of rule codes or prefixes to enable. 
Prefixes can specify exact rules (like `F841`), entire categories (like `F`), or anything in between.\n\nWhen breaking ties between enabled and disabled rules (via `select` and `ignore`, respectively), more specific prefixes override less specific prefixes. `ignore` takes precedence over `select` if the same prefix appears in both.", - "deprecated": true, + "description": "A list of rule codes or prefixes to enable. Prefixes can specify exact\nrules (like `F841`), entire categories (like `F`), or anything in\nbetween.\n\nWhen breaking ties between enabled and disabled rules (via `select` and\n`ignore`, respectively), more specific prefixes override less\nspecific prefixes. `ignore` takes precedence over `select` if the\nsame prefix appears in both.", "type": [ "array", "null" ], + "deprecated": true, "items": { - "$ref": "#/definitions/RuleSelector" + "$ref": "#/$defs/RuleSelector" } }, "show-fixes": { - "description": "Whether to show an enumeration of all fixed lint violations (overridden by the `--show-fixes` command-line flag).", + "description": "Whether to show an enumeration of all fixed lint violations\n(overridden by the `--show-fixes` command-line flag).", "type": [ "boolean", "null" ] }, "src": { - "description": "The directories to consider when resolving first- vs. third-party imports.\n\nWhen omitted, the `src` directory will typically default to including both:\n\n1. The directory containing the nearest `pyproject.toml`, `ruff.toml`, or `.ruff.toml` file (the \"project root\"). 2. The `\"src\"` subdirectory of the project root.\n\nThese defaults ensure that Ruff supports both flat layouts and `src` layouts out-of-the-box. (If a configuration file is explicitly provided (e.g., via the `--config` command-line flag), the current working directory will be considered the project root.)\n\nAs an example, consider an alternative project structure, like:\n\n```text my_project ├── pyproject.toml └── lib └── my_package ├── __init__.py ├── foo.py └── bar.py ```\n\nIn this case, the `./lib` directory should be included in the `src` option (e.g., `src = [\"lib\"]`), such that when resolving imports, `my_package.foo` is considered first-party.\n\nThis field supports globs. For example, if you have a series of Python packages in a `python_modules` directory, `src = [\"python_modules/*\"]` would expand to incorporate all packages in that directory. User home directory and environment variables will also be expanded.", + "description": "The directories to consider when resolving first- vs. third-party\nimports.\n\nWhen omitted, the `src` directory will typically default to including both:\n\n1. The directory containing the nearest `pyproject.toml`, `ruff.toml`, or `.ruff.toml` file (the \"project root\").\n2. The `\"src\"` subdirectory of the project root.\n\nThese defaults ensure that Ruff supports both flat layouts and `src` layouts out-of-the-box.\n(If a configuration file is explicitly provided (e.g., via the `--config` command-line\nflag), the current working directory will be considered the project root.)\n\nAs an example, consider an alternative project structure, like:\n\n```text\nmy_project\n├── pyproject.toml\n└── lib\n └── my_package\n ├── __init__.py\n ├── foo.py\n └── bar.py\n```\n\nIn this case, the `./lib` directory should be included in the `src` option\n(e.g., `src = [\"lib\"]`), such that when resolving imports, `my_package.foo`\nis considered first-party.\n\nThis field supports globs. 
For example, if you have a series of Python\npackages in a `python_modules` directory, `src = [\"python_modules/*\"]`\nwould expand to incorporate all packages in that directory. User home\ndirectory and environment variables will also be expanded.", "type": [ "array", "null" @@ -702,10 +702,10 @@ } }, "target-version": { - "description": "The minimum Python version to target, e.g., when considering automatic code upgrades, like rewriting type annotations. Ruff will not propose changes using features that are not available in the given version.\n\nFor example, to represent supporting Python >=3.11 or ==3.11 specify `target-version = \"py311\"`.\n\nIf you're already using a `pyproject.toml` file, we recommend `project.requires-python` instead, as it's based on Python packaging standards, and will be respected by other tools. For example, Ruff treats the following as identical to `target-version = \"py38\"`:\n\n```toml [project] requires-python = \">=3.8\" ```\n\nIf both are specified, `target-version` takes precedence over `requires-python`. See [_Inferring the Python version_](https://docs.astral.sh/ruff/configuration/#inferring-the-python-version) for a complete description of how the `target-version` is determined when left unspecified.\n\nNote that a stub file can [sometimes make use of a typing feature](https://typing.python.org/en/latest/spec/distributing.html#syntax) before it is available at runtime, as long as the stub does not make use of new *syntax*. For example, a type checker will understand `int | str` in a stub as being a `Union` type annotation, even if the type checker is run using Python 3.9, despite the fact that the `|` operator can only be used to create union types at runtime on Python 3.10+. As such, Ruff will often recommend newer features in a stub file than it would for an equivalent runtime file with the same target version.", + "description": "The minimum Python version to target, e.g., when considering automatic\ncode upgrades, like rewriting type annotations. Ruff will not propose\nchanges using features that are not available in the given version.\n\nFor example, to represent supporting Python >=3.11 or ==3.11\nspecify `target-version = \"py311\"`.\n\nIf you're already using a `pyproject.toml` file, we recommend\n`project.requires-python` instead, as it's based on Python packaging\nstandards, and will be respected by other tools. For example, Ruff\ntreats the following as identical to `target-version = \"py38\"`:\n\n```toml\n[project]\nrequires-python = \">=3.8\"\n```\n\nIf both are specified, `target-version` takes precedence over\n`requires-python`. See [_Inferring the Python version_](https://docs.astral.sh/ruff/configuration/#inferring-the-python-version)\nfor a complete description of how the `target-version` is determined\nwhen left unspecified.\n\nNote that a stub file can [sometimes make use of a typing feature](https://typing.python.org/en/latest/spec/distributing.html#syntax)\nbefore it is available at runtime, as long as the stub does not make\nuse of new *syntax*. For example, a type checker will understand\n`int | str` in a stub as being a `Union` type annotation, even if the\ntype checker is run using Python 3.9, despite the fact that the `|`\noperator can only be used to create union types at runtime on Python\n3.10+. 
As such, Ruff will often recommend newer features in a stub\nfile than it would for an equivalent runtime file with the same target\nversion.", "anyOf": [ { - "$ref": "#/definitions/PythonVersion" + "$ref": "#/$defs/PythonVersion" }, { "type": "null" @@ -713,40 +713,40 @@ ] }, "task-tags": { - "description": "A list of task tags to recognize (e.g., \"TODO\", \"FIXME\", \"XXX\").\n\nComments starting with these tags will be ignored by commented-out code detection (`ERA`), and skipped by line-length rules (`E501`) if [`ignore-overlong-task-comments`](#lint_pycodestyle_ignore-overlong-task-comments) is set to `true`.", - "deprecated": true, + "description": "A list of task tags to recognize (e.g., \"TODO\", \"FIXME\", \"XXX\").\n\nComments starting with these tags will be ignored by commented-out code\ndetection (`ERA`), and skipped by line-length rules (`E501`) if\n[`ignore-overlong-task-comments`](#lint_pycodestyle_ignore-overlong-task-comments) is set to `true`.", "type": [ "array", "null" ], + "deprecated": true, "items": { "type": "string" } }, "typing-modules": { - "description": "A list of modules whose exports should be treated equivalently to members of the `typing` module.\n\nThis is useful for ensuring proper type annotation inference for projects that re-export `typing` and `typing_extensions` members from a compatibility module. If omitted, any members imported from modules apart from `typing` and `typing_extensions` will be treated as ordinary Python objects.", - "deprecated": true, + "description": "A list of modules whose exports should be treated equivalently to\nmembers of the `typing` module.\n\nThis is useful for ensuring proper type annotation inference for\nprojects that re-export `typing` and `typing_extensions` members\nfrom a compatibility module. If omitted, any members imported from\nmodules apart from `typing` and `typing_extensions` will be treated\nas ordinary Python objects.", "type": [ "array", "null" ], + "deprecated": true, "items": { "type": "string" } }, "unfixable": { "description": "A list of rule codes or prefixes to consider non-fixable.", - "deprecated": true, "type": [ "array", "null" ], + "deprecated": true, "items": { - "$ref": "#/definitions/RuleSelector" + "$ref": "#/$defs/RuleSelector" } }, "unsafe-fixes": { - "description": "Enable application of unsafe fixes. If excluded, a hint will be displayed when unsafe fixes are available. If set to false, the hint will be hidden.", + "description": "Enable application of unsafe fixes.\nIf excluded, a hint will be displayed when unsafe fixes are available.\nIf set to false, the hint will be hidden.", "type": [ "boolean", "null" @@ -754,7 +754,7 @@ } }, "additionalProperties": false, - "definitions": { + "$defs": { "Alias": { "type": "string" }, @@ -763,17 +763,17 @@ "type": "object", "properties": { "detect-string-imports": { - "description": "Whether to detect imports from string literals. When enabled, Ruff will search for string literals that \"look like\" import paths, and include them in the import map, if they resolve to valid Python modules.", + "description": "Whether to detect imports from string literals. 
When enabled, Ruff will search for string\nliterals that \"look like\" import paths, and include them in the import map, if they resolve\nto valid Python modules.", "type": [ "boolean", "null" ] }, "direction": { - "description": "Whether to generate a map from file to files that it depends on (dependencies) or files that depend on it (dependents).", + "description": "Whether to generate a map from file to files that it depends on (dependencies) or files that\ndepend on it (dependents).", "anyOf": [ { - "$ref": "#/definitions/Direction" + "$ref": "#/$defs/Direction" }, { "type": "null" @@ -781,7 +781,7 @@ ] }, "exclude": { - "description": "A list of file patterns to exclude from analysis in addition to the files excluded globally (see [`exclude`](#exclude), and [`extend-exclude`](#extend-exclude)).\n\nExclusions are based on globs, and can be either:\n\n- Single-path patterns, like `.mypy_cache` (to exclude any directory named `.mypy_cache` in the tree), `foo.py` (to exclude any file named `foo.py`), or `foo_*.py` (to exclude any file matching `foo_*.py` ). - Relative patterns, like `directory/foo.py` (to exclude that specific file) or `directory/*.py` (to exclude any Python files in `directory`). Note that these paths are relative to the project root (e.g., the directory containing your `pyproject.toml`).\n\nFor more information on the glob syntax, refer to the [`globset` documentation](https://docs.rs/globset/latest/globset/#syntax).", + "description": "A list of file patterns to exclude from analysis in addition to the files excluded globally (see [`exclude`](#exclude), and [`extend-exclude`](#extend-exclude)).\n\nExclusions are based on globs, and can be either:\n\n- Single-path patterns, like `.mypy_cache` (to exclude any directory\n named `.mypy_cache` in the tree), `foo.py` (to exclude any file named\n `foo.py`), or `foo_*.py` (to exclude any file matching `foo_*.py` ).\n- Relative patterns, like `directory/foo.py` (to exclude that specific\n file) or `directory/*.py` (to exclude any Python files in\n `directory`). Note that these paths are relative to the project root\n (e.g., the directory containing your `pyproject.toml`).\n\nFor more information on the glob syntax, refer to the [`globset` documentation](https://docs.rs/globset/latest/globset/#syntax).", "type": [ "array", "null" @@ -791,7 +791,7 @@ } }, "include-dependencies": { - "description": "A map from file path to the list of Python or non-Python file paths or globs that should be considered dependencies of that file, regardless of whether relevant imports are detected.", + "description": "A map from file path to the list of Python or non-Python file paths or globs that should be\nconsidered dependencies of that file, regardless of whether relevant imports are detected.", "type": [ "object", "null" @@ -804,36 +804,36 @@ } }, "preview": { - "description": "Whether to enable preview mode. When preview mode is enabled, Ruff will expose unstable commands.", + "description": "Whether to enable preview mode. When preview mode is enabled, Ruff will expose unstable\ncommands.", "type": [ "boolean", "null" ] }, "string-imports-min-dots": { - "description": "The minimum number of dots in a string to consider it a valid import.\n\nThis setting is only relevant when [`detect-string-imports`](#detect-string-imports) is enabled. 
For example, if this is set to `2`, then only strings with at least two dots (e.g., `\"path.to.module\"`) would be considered valid imports.", + "description": "The minimum number of dots in a string to consider it a valid import.\n\nThis setting is only relevant when [`detect-string-imports`](#detect-string-imports) is enabled.\nFor example, if this is set to `2`, then only strings with at least two dots (e.g., `\"path.to.module\"`)\nwould be considered valid imports.", "type": [ "integer", "null" ], "format": "uint", - "minimum": 0.0 + "minimum": 0 } }, "additionalProperties": false }, "ApiBan": { "type": "object", - "required": [ - "msg" - ], "properties": { "msg": { "description": "The message to display when the API is used.", "type": "string" } }, - "additionalProperties": false + "additionalProperties": false, + "required": [ + "msg" + ] }, "BannedAliases": { "type": "array", @@ -856,23 +856,17 @@ { "description": "Use Google-style docstrings.", "type": "string", - "enum": [ - "google" - ] + "const": "google" }, { "description": "Use NumPy-style docstrings.", "type": "string", - "enum": [ - "numpy" - ] + "const": "numpy" }, { "description": "Use PEP257-style docstrings.", "type": "string", - "enum": [ - "pep257" - ] + "const": "pep257" } ] }, @@ -881,16 +875,12 @@ { "description": "Construct a map from module to its dependencies (i.e., the modules that it imports).", "type": "string", - "enum": [ - "dependencies" - ] + "const": "dependencies" }, { "description": "Construct a map from module to its dependents (i.e., the modules that import it).", "type": "string", - "enum": [ - "dependents" - ] + "const": "dependents" } ] }, @@ -900,7 +890,7 @@ "description": "Wrap docstring code examples at a fixed line width.", "oneOf": [ { - "$ref": "#/definitions/LineWidth" + "$ref": "#/$defs/LineWidth" } ] }, @@ -915,35 +905,35 @@ "type": "object", "properties": { "allow-star-arg-any": { - "description": "Whether to suppress `ANN401` for dynamically typed `*args` and `**kwargs` arguments.", + "description": "Whether to suppress `ANN401` for dynamically typed `*args` and\n`**kwargs` arguments.", "type": [ "boolean", "null" ] }, "ignore-fully-untyped": { - "description": "Whether to suppress `ANN*` rules for any declaration that hasn't been typed at all. This makes it easier to gradually add types to a codebase.", + "description": "Whether to suppress `ANN*` rules for any declaration\nthat hasn't been typed at all.\nThis makes it easier to gradually add types to a codebase.", "type": [ "boolean", "null" ] }, "mypy-init-return": { - "description": "Whether to allow the omission of a return type hint for `__init__` if at least one argument is annotated.", + "description": "Whether to allow the omission of a return type hint for `__init__` if at\nleast one argument is annotated.", "type": [ "boolean", "null" ] }, "suppress-dummy-args": { - "description": "Whether to suppress `ANN000`-level violations for arguments matching the \"dummy\" variable regex (like `_`).", + "description": "Whether to suppress `ANN000`-level violations for arguments matching the\n\"dummy\" variable regex (like `_`).", "type": [ "boolean", "null" ] }, "suppress-none-returning": { - "description": "Whether to suppress `ANN200`-level violations for functions that meet either of the following criteria:\n\n- Contain no `return` statement. 
- Explicit `return` statement(s) all return `None` (explicitly or implicitly).", + "description": "Whether to suppress `ANN200`-level violations for functions that meet\neither of the following criteria:\n\n- Contain no `return` statement.\n- Explicit `return` statement(s) all return `None` (explicitly or\n implicitly).", "type": [ "boolean", "null" @@ -957,7 +947,7 @@ "type": "object", "properties": { "allowed-markup-calls": { - "description": "A list of callable names, whose result may be safely passed into [`markupsafe.Markup`](https://markupsafe.palletsprojects.com/en/stable/escaping/#markupsafe.Markup).\n\nExpects to receive a list of fully-qualified names (e.g., `bleach.clean`, rather than `clean`).\n\nThis setting helps you avoid false positives in code like:\n\n```python from bleach import clean from markupsafe import Markup\n\ncleaned_markup = Markup(clean(some_user_input)) ```\n\nWhere the use of [`bleach.clean`](https://bleach.readthedocs.io/en/latest/clean.html) usually ensures that there's no XSS vulnerability.\n\nAlthough it is not recommended, you may also use this setting to whitelist other kinds of calls, e.g. calls to i18n translation functions, where how safe that is will depend on the implementation and how well the translations are audited.\n\nAnother common use-case is to wrap the output of functions that generate markup like [`xml.etree.ElementTree.tostring`](https://docs.python.org/3/library/xml.etree.elementtree.html#xml.etree.ElementTree.tostring) or template rendering engines where sanitization of potential user input is either already baked in or has to happen before rendering.", + "description": "A list of callable names, whose result may be safely passed into\n[`markupsafe.Markup`](https://markupsafe.palletsprojects.com/en/stable/escaping/#markupsafe.Markup).\n\nExpects to receive a list of fully-qualified names (e.g., `bleach.clean`, rather than `clean`).\n\nThis setting helps you avoid false positives in code like:\n\n```python\nfrom bleach import clean\nfrom markupsafe import Markup\n\ncleaned_markup = Markup(clean(some_user_input))\n```\n\nWhere the use of [`bleach.clean`](https://bleach.readthedocs.io/en/latest/clean.html)\nusually ensures that there's no XSS vulnerability.\n\nAlthough it is not recommended, you may also use this setting to whitelist other\nkinds of calls, e.g. calls to i18n translation functions, where how safe that is\nwill depend on the implementation and how well the translations are audited.\n\nAnother common use-case is to wrap the output of functions that generate markup\nlike [`xml.etree.ElementTree.tostring`](https://docs.python.org/3/library/xml.etree.elementtree.html#xml.etree.ElementTree.tostring)\nor template rendering engines where sanitization of potential user input is either\nalready baked in or has to happen before rendering.", "type": [ "array", "null" @@ -967,14 +957,14 @@ } }, "check-typed-exception": { - "description": "Whether to disallow `try`-`except`-`pass` (`S110`) for specific exception types. By default, `try`-`except`-`pass` is only disallowed for `Exception` and `BaseException`.", + "description": "Whether to disallow `try`-`except`-`pass` (`S110`) for specific\nexception types. 
By default, `try`-`except`-`pass` is only\ndisallowed for `Exception` and `BaseException`.", "type": [ "boolean", "null" ] }, "extend-markup-names": { - "description": "A list of additional callable names that behave like [`markupsafe.Markup`](https://markupsafe.palletsprojects.com/en/stable/escaping/#markupsafe.Markup).\n\nExpects to receive a list of fully-qualified names (e.g., `webhelpers.html.literal`, rather than `literal`).", + "description": "A list of additional callable names that behave like\n[`markupsafe.Markup`](https://markupsafe.palletsprojects.com/en/stable/escaping/#markupsafe.Markup).\n\nExpects to receive a list of fully-qualified names (e.g., `webhelpers.html.literal`, rather than\n`literal`).", "type": [ "array", "null" @@ -994,7 +984,7 @@ } }, "hardcoded-tmp-directory-extend": { - "description": "A list of directories to consider temporary, in addition to those specified by [`hardcoded-tmp-directory`](#lint_flake8-bandit_hardcoded-tmp-directory) (see `S108`).", + "description": "A list of directories to consider temporary, in addition to those\nspecified by [`hardcoded-tmp-directory`](#lint_flake8-bandit_hardcoded-tmp-directory) (see `S108`).", "type": [ "array", "null" @@ -1011,7 +1001,7 @@ "type": "object", "properties": { "extend-allowed-calls": { - "description": "Additional callable functions with which to allow boolean traps.\n\nExpects to receive a list of fully-qualified names (e.g., `pydantic.Field`, rather than `Field`).", + "description": "Additional callable functions with which to allow boolean traps.\n\nExpects to receive a list of fully-qualified names (e.g., `pydantic.Field`, rather than\n`Field`).", "type": [ "array", "null" @@ -1028,7 +1018,7 @@ "type": "object", "properties": { "extend-immutable-calls": { - "description": "Additional callable functions to consider \"immutable\" when evaluating, e.g., the `function-call-in-default-argument` rule (`B008`) or `function-call-in-dataclass-defaults` rule (`RUF009`).\n\nExpects to receive a list of fully-qualified names (e.g., `fastapi.Query`, rather than `Query`).", + "description": "Additional callable functions to consider \"immutable\" when evaluating, e.g., the\n`function-call-in-default-argument` rule (`B008`) or `function-call-in-dataclass-defaults`\nrule (`RUF009`).\n\nExpects to receive a list of fully-qualified names (e.g., `fastapi.Query`, rather than\n`Query`).", "type": [ "array", "null" @@ -1056,33 +1046,33 @@ }, "builtins-allowed-modules": { "description": "DEPRECATED: This option has been renamed to `allowed-modules`. Use `allowed-modules` instead.\n\nList of builtin module names to allow.\n\nThis option is ignored if both `allowed-modules` and `builtins-allowed-modules` are set.", - "deprecated": true, "type": [ "array", "null" ], + "deprecated": true, "items": { "type": "string" } }, "builtins-ignorelist": { "description": "DEPRECATED: This option has been renamed to `ignorelist`. Use `ignorelist` instead.\n\nIgnore list of builtins.\n\nThis option is ignored if both `ignorelist` and `builtins-ignorelist` are set.", - "deprecated": true, "type": [ "array", "null" ], + "deprecated": true, "items": { "type": "string" } }, "builtins-strict-checking": { "description": "DEPRECATED: This option has been renamed to `strict-checking`. 
Use `strict-checking` instead.\n\nCompare module names instead of full module paths.\n\nThis option is ignored if both `strict-checking` and `builtins-strict-checking` are set.", - "deprecated": true, "type": [ "boolean", "null" - ] + ], + "deprecated": true }, "ignorelist": { "description": "Ignore list of builtins.", @@ -1123,23 +1113,23 @@ "type": "object", "properties": { "author": { - "description": "Author to enforce within the copyright notice. If provided, the author must be present immediately following the copyright notice.", + "description": "Author to enforce within the copyright notice. If provided, the\nauthor must be present immediately following the copyright notice.", "type": [ "string", "null" ] }, "min-file-size": { - "description": "A minimum file size (in bytes) required for a copyright notice to be enforced. By default, all files are validated.", + "description": "A minimum file size (in bytes) required for a copyright notice to\nbe enforced. By default, all files are validated.", "type": [ "integer", "null" ], "format": "uint", - "minimum": 0.0 + "minimum": 0 }, "notice-rgx": { - "description": "The regular expression used to match the copyright notice, compiled with the [`regex`](https://docs.rs/regex/latest/regex/) crate. Defaults to `(?i)Copyright\\s+((?:\\(C\\)|©)\\s+)?\\d{4}((-|,\\s)\\d{4})*`, which matches the following:\n\n- `Copyright 2023` - `Copyright (C) 2023` - `Copyright 2021-2023` - `Copyright (C) 2021-2023` - `Copyright (C) 2021, 2023`", + "description": "The regular expression used to match the copyright notice, compiled\nwith the [`regex`](https://docs.rs/regex/latest/regex/) crate.\nDefaults to `(?i)Copyright\\s+((?:\\(C\\)|©)\\s+)?\\d{4}((-|,\\s)\\d{4})*`, which matches\nthe following:\n\n- `Copyright 2023`\n- `Copyright (C) 2023`\n- `Copyright 2021-2023`\n- `Copyright (C) 2021-2023`\n- `Copyright (C) 2021, 2023`", "type": [ "string", "null" @@ -1159,7 +1149,7 @@ "null" ], "format": "uint", - "minimum": 0.0 + "minimum": 0 } }, "additionalProperties": false @@ -1169,13 +1159,13 @@ "type": "object", "properties": { "extend-function-names": { - "description": "Additional function names to consider as internationalization calls, in addition to those included in [`function-names`](#lint_flake8-gettext_function-names).", + "description": "Additional function names to consider as internationalization calls, in addition to those\nincluded in [`function-names`](#lint_flake8-gettext_function-names).", "type": [ "array", "null" ], "items": { - "type": "string" + "$ref": "#/$defs/string" } }, "function-names": { @@ -1185,7 +1175,7 @@ "null" ], "items": { - "type": "string" + "$ref": "#/$defs/string" } } }, @@ -1196,7 +1186,7 @@ "type": "object", "properties": { "allow-multiline": { - "description": "Whether to allow implicit string concatenations for multiline strings. By default, implicit concatenations of multiline strings are allowed (but continuation lines, delimited with a backslash, are prohibited).\n\nSetting `allow-multiline = false` will automatically disable the `explicit-string-concatenation` (`ISC003`) rule. 
Otherwise, both implicit and explicit multiline string concatenations would be seen as violations, making it impossible to write a linter-compliant multiline string.", + "description": "Whether to allow implicit string concatenations for multiline strings.\nBy default, implicit concatenations of multiline strings are\nallowed (but continuation lines, delimited with a backslash, are\nprohibited).\n\nSetting `allow-multiline = false` will automatically disable the\n`explicit-string-concatenation` (`ISC003`) rule. Otherwise, both\nimplicit and explicit multiline string concatenations would be seen\nas violations, making it impossible to write a linter-compliant multiline\nstring.", "type": [ "boolean", "null" @@ -1210,13 +1200,13 @@ "type": "object", "properties": { "aliases": { - "description": "The conventional aliases for imports. These aliases can be extended by the [`extend-aliases`](#lint_flake8-import-conventions_extend-aliases) option.", + "description": "The conventional aliases for imports. These aliases can be extended by\nthe [`extend-aliases`](#lint_flake8-import-conventions_extend-aliases) option.", "type": [ "object", "null" ], "additionalProperties": { - "$ref": "#/definitions/Alias" + "$ref": "#/$defs/Alias" } }, "banned-aliases": { @@ -1226,11 +1216,11 @@ "null" ], "additionalProperties": { - "$ref": "#/definitions/BannedAliases" + "$ref": "#/$defs/BannedAliases" } }, "banned-from": { - "description": "A list of modules that should not be imported from using the `from ... import ...` syntax.\n\nFor example, given `banned-from = [\"pandas\"]`, `from pandas import DataFrame` would be disallowed, while `import pandas` would be allowed.", + "description": "A list of modules that should not be imported from using the\n`from ... import ...` syntax.\n\nFor example, given `banned-from = [\"pandas\"]`, `from pandas import DataFrame`\nwould be disallowed, while `import pandas` would be allowed.", "type": [ "array", "null" @@ -1241,13 +1231,13 @@ "uniqueItems": true }, "extend-aliases": { - "description": "A mapping from module to conventional import alias. These aliases will be added to the [`aliases`](#lint_flake8-import-conventions_aliases) mapping.", + "description": "A mapping from module to conventional import alias. These aliases will\nbe added to the [`aliases`](#lint_flake8-import-conventions_aliases) mapping.", "type": [ "object", "null" ], "additionalProperties": { - "$ref": "#/definitions/Alias" + "$ref": "#/$defs/Alias" } } }, @@ -1258,24 +1248,24 @@ "type": "object", "properties": { "fixture-parentheses": { - "description": "Boolean flag specifying whether `@pytest.fixture()` without parameters should have parentheses. If the option is set to `false` (the default), `@pytest.fixture` is valid and `@pytest.fixture()` is invalid. If set to `true`, `@pytest.fixture()` is valid and `@pytest.fixture` is invalid.", + "description": "Boolean flag specifying whether `@pytest.fixture()` without parameters\nshould have parentheses. If the option is set to `false` (the default),\n`@pytest.fixture` is valid and `@pytest.fixture()` is invalid. If set\nto `true`, `@pytest.fixture()` is valid and `@pytest.fixture` is\ninvalid.", "type": [ "boolean", "null" ] }, "mark-parentheses": { - "description": "Boolean flag specifying whether `@pytest.mark.foo()` without parameters should have parentheses. If the option is set to `false` (the default), `@pytest.mark.foo` is valid and `@pytest.mark.foo()` is invalid. 
If set to `true`, `@pytest.mark.foo()` is valid and `@pytest.mark.foo` is invalid.", + "description": "Boolean flag specifying whether `@pytest.mark.foo()` without parameters\nshould have parentheses. If the option is set to `false` (the\ndefault), `@pytest.mark.foo` is valid and `@pytest.mark.foo()` is\ninvalid. If set to `true`, `@pytest.mark.foo()` is valid and\n`@pytest.mark.foo` is invalid.", "type": [ "boolean", "null" ] }, "parametrize-names-type": { - "description": "Expected type for multiple argument names in `@pytest.mark.parametrize`. The following values are supported:\n\n- `csv` — a comma-separated list, e.g. `@pytest.mark.parametrize(\"name1,name2\", ...)` - `tuple` (default) — e.g. `@pytest.mark.parametrize((\"name1\", \"name2\"), ...)` - `list` — e.g. `@pytest.mark.parametrize([\"name1\", \"name2\"], ...)`", + "description": "Expected type for multiple argument names in `@pytest.mark.parametrize`.\nThe following values are supported:\n\n- `csv` — a comma-separated list, e.g.\n `@pytest.mark.parametrize(\"name1,name2\", ...)`\n- `tuple` (default) — e.g.\n `@pytest.mark.parametrize((\"name1\", \"name2\"), ...)`\n- `list` — e.g. `@pytest.mark.parametrize([\"name1\", \"name2\"], ...)`", "anyOf": [ { - "$ref": "#/definitions/ParametrizeNameType" + "$ref": "#/$defs/ParametrizeNameType" }, { "type": "null" @@ -1283,10 +1273,10 @@ ] }, "parametrize-values-row-type": { - "description": "Expected type for each row of values in `@pytest.mark.parametrize` in case of multiple parameters. The following values are supported:\n\n- `tuple` (default) — e.g. `@pytest.mark.parametrize((\"name1\", \"name2\"), [(1, 2), (3, 4)])` - `list` — e.g. `@pytest.mark.parametrize((\"name1\", \"name2\"), [[1, 2], [3, 4]])`", + "description": "Expected type for each row of values in `@pytest.mark.parametrize` in\ncase of multiple parameters. The following values are supported:\n\n- `tuple` (default) — e.g.\n `@pytest.mark.parametrize((\"name1\", \"name2\"), [(1, 2), (3, 4)])`\n- `list` — e.g.\n `@pytest.mark.parametrize((\"name1\", \"name2\"), [[1, 2], [3, 4]])`", "anyOf": [ { - "$ref": "#/definitions/ParametrizeValuesRowType" + "$ref": "#/$defs/ParametrizeValuesRowType" }, { "type": "null" @@ -1294,10 +1284,10 @@ ] }, "parametrize-values-type": { - "description": "Expected type for the list of values rows in `@pytest.mark.parametrize`. The following values are supported:\n\n- `tuple` — e.g. `@pytest.mark.parametrize(\"name\", (1, 2, 3))` - `list` (default) — e.g. `@pytest.mark.parametrize(\"name\", [1, 2, 3])`", + "description": "Expected type for the list of values rows in `@pytest.mark.parametrize`.\nThe following values are supported:\n\n- `tuple` — e.g. `@pytest.mark.parametrize(\"name\", (1, 2, 3))`\n- `list` (default) — e.g. `@pytest.mark.parametrize(\"name\", [1, 2, 3])`", "anyOf": [ { - "$ref": "#/definitions/ParametrizeValuesType" + "$ref": "#/$defs/ParametrizeValuesType" }, { "type": "null" @@ -1305,7 +1295,7 @@ ] }, "raises-extend-require-match-for": { - "description": "List of additional exception names that require a match= parameter in a `pytest.raises()` call. This extends the default list of exceptions that require a match= parameter. This option is useful if you want to extend the default list of exceptions that require a match= parameter without having to specify the entire list. Note that this option does not remove any exceptions from the default list.\n\nSupports glob patterns. 
For more information on the glob syntax, refer to the [`globset` documentation](https://docs.rs/globset/latest/globset/#syntax).", + "description": "List of additional exception names that require a match= parameter in a\n`pytest.raises()` call. This extends the default list of exceptions\nthat require a match= parameter.\nThis option is useful if you want to extend the default list of\nexceptions that require a match= parameter without having to specify\nthe entire list.\nNote that this option does not remove any exceptions from the default\nlist.\n\nSupports glob patterns. For more information on the glob syntax, refer\nto the [`globset` documentation](https://docs.rs/globset/latest/globset/#syntax).", "type": [ "array", "null" @@ -1315,7 +1305,7 @@ } }, "raises-require-match-for": { - "description": "List of exception names that require a match= parameter in a `pytest.raises()` call.\n\nSupports glob patterns. For more information on the glob syntax, refer to the [`globset` documentation](https://docs.rs/globset/latest/globset/#syntax).", + "description": "List of exception names that require a match= parameter in a\n`pytest.raises()` call.\n\nSupports glob patterns. For more information on the glob syntax, refer\nto the [`globset` documentation](https://docs.rs/globset/latest/globset/#syntax).", "type": [ "array", "null" @@ -1325,7 +1315,7 @@ } }, "warns-extend-require-match-for": { - "description": "List of additional warning names that require a match= parameter in a `pytest.warns()` call. This extends the default list of warnings that require a match= parameter.\n\nThis option is useful if you want to extend the default list of warnings that require a match= parameter without having to specify the entire list.\n\nNote that this option does not remove any warnings from the default list.\n\nSupports glob patterns. For more information on the glob syntax, refer to the [`globset` documentation](https://docs.rs/globset/latest/globset/#syntax).", + "description": "List of additional warning names that require a match= parameter in a\n`pytest.warns()` call. This extends the default list of warnings that\nrequire a match= parameter.\n\nThis option is useful if you want to extend the default list of warnings\nthat require a match= parameter without having to specify the entire\nlist.\n\nNote that this option does not remove any warnings from the default\nlist.\n\nSupports glob patterns. For more information on the glob syntax, refer\nto the [`globset` documentation](https://docs.rs/globset/latest/globset/#syntax).", "type": [ "array", "null" @@ -1335,7 +1325,7 @@ } }, "warns-require-match-for": { - "description": "List of warning names that require a match= parameter in a `pytest.warns()` call.\n\nSupports glob patterns. For more information on the glob syntax, refer to the [`globset` documentation](https://docs.rs/globset/latest/globset/#syntax).", + "description": "List of warning names that require a match= parameter in a\n`pytest.warns()` call.\n\nSupports glob patterns. For more information on the glob syntax, refer\nto the [`globset` documentation](https://docs.rs/globset/latest/globset/#syntax).", "type": [ "array", "null" @@ -1352,17 +1342,17 @@ "type": "object", "properties": { "avoid-escape": { - "description": "Whether to avoid using single quotes if a string contains single quotes, or vice-versa with double quotes, as per [PEP 8](https://peps.python.org/pep-0008/#string-quotes). 
This minimizes the need to escape quotation marks within strings.", + "description": "Whether to avoid using single quotes if a string contains single quotes,\nor vice-versa with double quotes, as per [PEP 8](https://peps.python.org/pep-0008/#string-quotes).\nThis minimizes the need to escape quotation marks within strings.", "type": [ "boolean", "null" ] }, "docstring-quotes": { - "description": "Quote style to prefer for docstrings (either \"single\" or \"double\").\n\nWhen using the formatter, only \"double\" is compatible, as the formatter enforces double quotes for docstrings strings.", + "description": "Quote style to prefer for docstrings (either \"single\" or \"double\").\n\nWhen using the formatter, only \"double\" is compatible, as the formatter\nenforces double quotes for docstrings strings.", "anyOf": [ { - "$ref": "#/definitions/Quote" + "$ref": "#/$defs/Quote" }, { "type": "null" @@ -1370,10 +1360,10 @@ ] }, "inline-quotes": { - "description": "Quote style to prefer for inline strings (either \"single\" or \"double\").\n\nWhen using the formatter, ensure that [`format.quote-style`](#format_quote-style) is set to the same preferred quote style.", + "description": "Quote style to prefer for inline strings (either \"single\" or\n\"double\").\n\nWhen using the formatter, ensure that [`format.quote-style`](#format_quote-style) is set to\nthe same preferred quote style.", "anyOf": [ { - "$ref": "#/definitions/Quote" + "$ref": "#/$defs/Quote" }, { "type": "null" @@ -1381,10 +1371,10 @@ ] }, "multiline-quotes": { - "description": "Quote style to prefer for multiline strings (either \"single\" or \"double\").\n\nWhen using the formatter, only \"double\" is compatible, as the formatter enforces double quotes for multiline strings.", + "description": "Quote style to prefer for multiline strings (either \"single\" or\n\"double\").\n\nWhen using the formatter, only \"double\" is compatible, as the formatter\nenforces double quotes for multiline strings.", "anyOf": [ { - "$ref": "#/definitions/Quote" + "$ref": "#/$defs/Quote" }, { "type": "null" @@ -1399,13 +1389,13 @@ "type": "object", "properties": { "extend-ignore-names": { - "description": "Additional names to ignore when considering `flake8-self` violations, in addition to those included in [`ignore-names`](#lint_flake8-self_ignore-names).", + "description": "Additional names to ignore when considering `flake8-self` violations,\nin addition to those included in [`ignore-names`](#lint_flake8-self_ignore-names).", "type": [ "array", "null" ], "items": { - "type": "string" + "$ref": "#/$defs/string" } }, "ignore-names": { @@ -1415,7 +1405,7 @@ "null" ], "items": { - "type": "string" + "$ref": "#/$defs/string" } } }, @@ -1426,10 +1416,10 @@ "type": "object", "properties": { "ban-relative-imports": { - "description": "Whether to ban all relative imports (`\"all\"`), or only those imports that extend into the parent module or beyond (`\"parents\"`).", + "description": "Whether to ban all relative imports (`\"all\"`), or only those imports\nthat extend into the parent module or beyond (`\"parents\"`).", "anyOf": [ { - "$ref": "#/definitions/Strictness" + "$ref": "#/$defs/Strictness" }, { "type": "null" @@ -1437,17 +1427,17 @@ ] }, "banned-api": { - "description": "Specific modules or module members that may not be imported or accessed. 
Note that this rule is only meant to flag accidental uses, and can be circumvented via `eval` or `importlib`.", + "description": "Specific modules or module members that may not be imported or accessed.\nNote that this rule is only meant to flag accidental uses,\nand can be circumvented via `eval` or `importlib`.", "type": [ "object", "null" ], "additionalProperties": { - "$ref": "#/definitions/ApiBan" + "$ref": "#/$defs/ApiBan" } }, "banned-module-level-imports": { - "description": "List of specific modules that may not be imported at module level, and should instead be imported lazily (e.g., within a function definition, or an `if TYPE_CHECKING:` block, or some other nested context). This also affects the rule `import-outside-top-level` if `banned-module-level-imports` is enabled.", + "description": "List of specific modules that may not be imported at module level, and should instead be\nimported lazily (e.g., within a function definition, or an `if TYPE_CHECKING:`\nblock, or some other nested context). This also affects the rule `import-outside-top-level`\nif `banned-module-level-imports` is enabled.", "type": [ "array", "null" @@ -1464,7 +1454,7 @@ "type": "object", "properties": { "exempt-modules": { - "description": "Exempt certain modules from needing to be moved into type-checking blocks.", + "description": "Exempt certain modules from needing to be moved into type-checking\nblocks.", "type": [ "array", "null" @@ -1474,14 +1464,14 @@ } }, "quote-annotations": { - "description": "Whether to add quotes around type annotations, if doing so would allow the corresponding import to be moved into a type-checking block.\n\nFor example, in the following, Python requires that `Sequence` be available at runtime, despite the fact that it's only used in a type annotation:\n\n```python from collections.abc import Sequence\n\ndef func(value: Sequence[int]) -> None: ... ```\n\nIn other words, moving `from collections.abc import Sequence` into an `if TYPE_CHECKING:` block above would cause a runtime error, as the type would no longer be available at runtime.\n\nBy default, Ruff will respect such runtime semantics and avoid moving the import to prevent such runtime errors.\n\nSetting `quote-annotations` to `true` will instruct Ruff to add quotes around the annotation (e.g., `\"Sequence[int]\"`), which in turn enables Ruff to move the import into an `if TYPE_CHECKING:` block, like so:\n\n```python from typing import TYPE_CHECKING\n\nif TYPE_CHECKING: from collections.abc import Sequence\n\ndef func(value: \"Sequence[int]\") -> None: ... ```\n\nNote that this setting has no effect when `from __future__ import annotations` is present, as `__future__` annotations are always treated equivalently to quoted annotations. 
Similarly, this setting has no effect on Python versions after 3.14 because these annotations are also deferred.", + "description": "Whether to add quotes around type annotations, if doing so would allow\nthe corresponding import to be moved into a type-checking block.\n\nFor example, in the following, Python requires that `Sequence` be\navailable at runtime, despite the fact that it's only used in a type\nannotation:\n\n```python\nfrom collections.abc import Sequence\n\n\ndef func(value: Sequence[int]) -> None:\n ...\n```\n\nIn other words, moving `from collections.abc import Sequence` into an\n`if TYPE_CHECKING:` block above would cause a runtime error, as the\ntype would no longer be available at runtime.\n\nBy default, Ruff will respect such runtime semantics and avoid moving\nthe import to prevent such runtime errors.\n\nSetting `quote-annotations` to `true` will instruct Ruff to add quotes\naround the annotation (e.g., `\"Sequence[int]\"`), which in turn enables\nRuff to move the import into an `if TYPE_CHECKING:` block, like so:\n\n```python\nfrom typing import TYPE_CHECKING\n\nif TYPE_CHECKING:\n from collections.abc import Sequence\n\n\ndef func(value: \"Sequence[int]\") -> None:\n ...\n```\n\nNote that this setting has no effect when `from __future__ import annotations`\nis present, as `__future__` annotations are always treated equivalently\nto quoted annotations. Similarly, this setting has no effect on Python\nversions after 3.14 because these annotations are also deferred.", "type": [ "boolean", "null" ] }, "runtime-evaluated-base-classes": { - "description": "Exempt classes that list any of the enumerated classes as a base class from needing to be moved into type-checking blocks.\n\nCommon examples include Pydantic's `pydantic.BaseModel` and SQLAlchemy's `sqlalchemy.orm.DeclarativeBase`, but can also support user-defined classes that inherit from those base classes. For example, if you define a common `DeclarativeBase` subclass that's used throughout your project (e.g., `class Base(DeclarativeBase) ...` in `base.py`), you can add it to this list (`runtime-evaluated-base-classes = [\"base.Base\"]`) to exempt models from being moved into type-checking blocks.", + "description": "Exempt classes that list any of the enumerated classes as a base class\nfrom needing to be moved into type-checking blocks.\n\nCommon examples include Pydantic's `pydantic.BaseModel` and SQLAlchemy's\n`sqlalchemy.orm.DeclarativeBase`, but can also support user-defined\nclasses that inherit from those base classes. For example, if you define\na common `DeclarativeBase` subclass that's used throughout your project\n(e.g., `class Base(DeclarativeBase) ...` in `base.py`), you can add it to\nthis list (`runtime-evaluated-base-classes = [\"base.Base\"]`) to exempt\nmodels from being moved into type-checking blocks.", "type": [ "array", "null" @@ -1491,7 +1481,7 @@ } }, "runtime-evaluated-decorators": { - "description": "Exempt classes and functions decorated with any of the enumerated decorators from being moved into type-checking blocks.\n\nCommon examples include Pydantic's `@pydantic.validate_call` decorator (for functions) and attrs' `@attrs.define` decorator (for classes).\n\nThis also supports framework decorators like FastAPI's `fastapi.FastAPI.get` which will work across assignments in the same module.\n\nFor example: ```python import fastapi\n\napp = FastAPI(\"app\")\n\n@app.get(\"/home\") def home() -> str: ... 
```\n\nHere `app.get` will correctly be identified as `fastapi.FastAPI.get`.", + "description": "Exempt classes and functions decorated with any of the enumerated\ndecorators from being moved into type-checking blocks.\n\nCommon examples include Pydantic's `@pydantic.validate_call` decorator\n(for functions) and attrs' `@attrs.define` decorator (for classes).\n\nThis also supports framework decorators like FastAPI's `fastapi.FastAPI.get`\nwhich will work across assignments in the same module.\n\nFor example:\n```python\nimport fastapi\n\napp = FastAPI(\"app\")\n\n@app.get(\"/home\")\ndef home() -> str: ...\n```\n\nHere `app.get` will correctly be identified as `fastapi.FastAPI.get`.", "type": [ "array", "null" @@ -1501,7 +1491,7 @@ } }, "strict": { - "description": "Enforce `TC001`, `TC002`, and `TC003` rules even when valid runtime imports are present for the same module.\n\nSee flake8-type-checking's [strict](https://github.com/snok/flake8-type-checking#strict) option.", + "description": "Enforce `TC001`, `TC002`, and `TC003` rules even when valid runtime imports\nare present for the same module.\n\nSee flake8-type-checking's [strict](https://github.com/snok/flake8-type-checking#strict) option.", "type": [ "boolean", "null" @@ -1529,17 +1519,17 @@ "type": "object", "properties": { "docstring-code-format": { - "description": "Whether to format code snippets in docstrings.\n\nWhen this is enabled, Python code examples within docstrings are automatically reformatted.\n\nFor example, when this is enabled, the following code:\n\n```python def f(x): \"\"\" Something about `f`. And an example in doctest format:\n\n>>> f( x )\n\nMarkdown is also supported:\n\n```py f( x ) ```\n\nAs are reStructuredText literal blocks::\n\nf( x )\n\nAnd reStructuredText code blocks:\n\n.. code-block:: python\n\nf( x ) \"\"\" pass ```\n\n... will be reformatted (assuming the rest of the options are set to their defaults) as:\n\n```python def f(x): \"\"\" Something about `f`. And an example in doctest format:\n\n>>> f(x)\n\nMarkdown is also supported:\n\n```py f(x) ```\n\nAs are reStructuredText literal blocks::\n\nf(x)\n\nAnd reStructuredText code blocks:\n\n.. code-block:: python\n\nf(x) \"\"\" pass ```\n\nIf a code snippet in a docstring contains invalid Python code or if the formatter would otherwise write invalid Python code, then the code example is ignored by the formatter and kept as-is.\n\nCurrently, doctest, Markdown, reStructuredText literal blocks, and reStructuredText code blocks are all supported and automatically recognized. In the case of unlabeled fenced code blocks in Markdown and reStructuredText literal blocks, the contents are assumed to be Python and reformatted. As with any other format, if the contents aren't valid Python, then the block is left untouched automatically.", + "description": "Whether to format code snippets in docstrings.\n\nWhen this is enabled, Python code examples within docstrings are\nautomatically reformatted.\n\nFor example, when this is enabled, the following code:\n\n```python\ndef f(x):\n \"\"\"\n Something about `f`. And an example in doctest format:\n\n >>> f( x )\n\n Markdown is also supported:\n\n ```py\n f( x )\n ```\n\n As are reStructuredText literal blocks::\n\n f( x )\n\n\n And reStructuredText code blocks:\n\n .. code-block:: python\n\n f( x )\n \"\"\"\n pass\n```\n\n... will be reformatted (assuming the rest of the options are set to\ntheir defaults) as:\n\n```python\ndef f(x):\n \"\"\"\n Something about `f`. 
And an example in doctest format:\n\n >>> f(x)\n\n Markdown is also supported:\n\n ```py\n f(x)\n ```\n\n As are reStructuredText literal blocks::\n\n f(x)\n\n\n And reStructuredText code blocks:\n\n .. code-block:: python\n\n f(x)\n \"\"\"\n pass\n```\n\nIf a code snippet in a docstring contains invalid Python code or if the\nformatter would otherwise write invalid Python code, then the code\nexample is ignored by the formatter and kept as-is.\n\nCurrently, doctest, Markdown, reStructuredText literal blocks, and\nreStructuredText code blocks are all supported and automatically\nrecognized. In the case of unlabeled fenced code blocks in Markdown and\nreStructuredText literal blocks, the contents are assumed to be Python\nand reformatted. As with any other format, if the contents aren't valid\nPython, then the block is left untouched automatically.", "type": [ "boolean", "null" ] }, "docstring-code-line-length": { - "description": "Set the line length used when formatting code snippets in docstrings.\n\nThis only has an effect when the `docstring-code-format` setting is enabled.\n\nThe default value for this setting is `\"dynamic\"`, which has the effect of ensuring that any reformatted code examples in docstrings adhere to the global line length configuration that is used for the surrounding Python code. The point of this setting is that it takes the indentation of the docstring into account when reformatting code examples.\n\nAlternatively, this can be set to a fixed integer, which will result in the same line length limit being applied to all reformatted code examples in docstrings. When set to a fixed integer, the indent of the docstring is not taken into account. That is, this may result in lines in the reformatted code example that exceed the globally configured line length limit.\n\nFor example, when this is set to `20` and [`docstring-code-format`](#docstring-code-format) is enabled, then this code:\n\n```python def f(x): ''' Something about `f`. And an example:\n\n.. code-block:: python\n\nfoo, bar, quux = this_is_a_long_line(lion, hippo, lemur, bear) ''' pass ```\n\n... will be reformatted (assuming the rest of the options are set to their defaults) as:\n\n```python def f(x): \"\"\" Something about `f`. And an example:\n\n.. code-block:: python\n\n( foo, bar, quux, ) = this_is_a_long_line( lion, hippo, lemur, bear, ) \"\"\" pass ```", + "description": "Set the line length used when formatting code snippets in docstrings.\n\nThis only has an effect when the `docstring-code-format` setting is\nenabled.\n\nThe default value for this setting is `\"dynamic\"`, which has the effect\nof ensuring that any reformatted code examples in docstrings adhere to\nthe global line length configuration that is used for the surrounding\nPython code. The point of this setting is that it takes the indentation\nof the docstring into account when reformatting code examples.\n\nAlternatively, this can be set to a fixed integer, which will result\nin the same line length limit being applied to all reformatted code\nexamples in docstrings. When set to a fixed integer, the indent of the\ndocstring is not taken into account. That is, this may result in lines\nin the reformatted code example that exceed the globally configured\nline length limit.\n\nFor example, when this is set to `20` and [`docstring-code-format`](#docstring-code-format)\nis enabled, then this code:\n\n```python\ndef f(x):\n '''\n Something about `f`. And an example:\n\n .. 
code-block:: python\n\n foo, bar, quux = this_is_a_long_line(lion, hippo, lemur, bear)\n '''\n pass\n```\n\n... will be reformatted (assuming the rest of the options are set\nto their defaults) as:\n\n```python\ndef f(x):\n \"\"\"\n Something about `f`. And an example:\n\n .. code-block:: python\n\n (\n foo,\n bar,\n quux,\n ) = this_is_a_long_line(\n lion,\n hippo,\n lemur,\n bear,\n )\n \"\"\"\n pass\n```", "anyOf": [ { - "$ref": "#/definitions/DocstringCodeLineWidth" + "$ref": "#/$defs/DocstringCodeLineWidth" }, { "type": "null" @@ -1547,7 +1537,7 @@ ] }, "exclude": { - "description": "A list of file patterns to exclude from formatting in addition to the files excluded globally (see [`exclude`](#exclude), and [`extend-exclude`](#extend-exclude)).\n\nExclusions are based on globs, and can be either:\n\n- Single-path patterns, like `.mypy_cache` (to exclude any directory named `.mypy_cache` in the tree), `foo.py` (to exclude any file named `foo.py`), or `foo_*.py` (to exclude any file matching `foo_*.py` ). - Relative patterns, like `directory/foo.py` (to exclude that specific file) or `directory/*.py` (to exclude any Python files in `directory`). Note that these paths are relative to the project root (e.g., the directory containing your `pyproject.toml`).\n\nFor more information on the glob syntax, refer to the [`globset` documentation](https://docs.rs/globset/latest/globset/#syntax).", + "description": "A list of file patterns to exclude from formatting in addition to the files excluded globally (see [`exclude`](#exclude), and [`extend-exclude`](#extend-exclude)).\n\nExclusions are based on globs, and can be either:\n\n- Single-path patterns, like `.mypy_cache` (to exclude any directory\n named `.mypy_cache` in the tree), `foo.py` (to exclude any file named\n `foo.py`), or `foo_*.py` (to exclude any file matching `foo_*.py` ).\n- Relative patterns, like `directory/foo.py` (to exclude that specific\n file) or `directory/*.py` (to exclude any Python files in\n `directory`). Note that these paths are relative to the project root\n (e.g., the directory containing your `pyproject.toml`).\n\nFor more information on the glob syntax, refer to the [`globset` documentation](https://docs.rs/globset/latest/globset/#syntax).", "type": [ "array", "null" @@ -1557,10 +1547,10 @@ } }, "indent-style": { - "description": "Whether to use spaces or tabs for indentation.\n\n`indent-style = \"space\"` (default):\n\n```python def f(): print(\"Hello\") # Spaces indent the `print` statement. ```\n\n`indent-style = \"tab\"`:\n\n```python def f(): print(\"Hello\") # A tab `\\t` indents the `print` statement. ```\n\nPEP 8 recommends using spaces for [indentation](https://peps.python.org/pep-0008/#indentation). 
We care about accessibility; if you do not need tabs for accessibility, we do not recommend you use them.\n\nSee [`indent-width`](#indent-width) to configure the number of spaces per indentation and the tab width.", + "description": "Whether to use spaces or tabs for indentation.\n\n`indent-style = \"space\"` (default):\n\n```python\ndef f():\n print(\"Hello\") # Spaces indent the `print` statement.\n```\n\n`indent-style = \"tab\"`:\n\n```python\ndef f():\n print(\"Hello\") # A tab `\\t` indents the `print` statement.\n```\n\nPEP 8 recommends using spaces for [indentation](https://peps.python.org/pep-0008/#indentation).\nWe care about accessibility; if you do not need tabs for accessibility, we do not recommend you use them.\n\nSee [`indent-width`](#indent-width) to configure the number of spaces per indentation and the tab width.", "anyOf": [ { - "$ref": "#/definitions/IndentStyle" + "$ref": "#/$defs/IndentStyle" }, { "type": "null" @@ -1568,10 +1558,10 @@ ] }, "line-ending": { - "description": "The character Ruff uses at the end of a line.\n\n* `auto`: The newline style is detected automatically on a file per file basis. Files with mixed line endings will be converted to the first detected line ending. Defaults to `\\n` for files that contain no line endings. * `lf`: Line endings will be converted to `\\n`. The default line ending on Unix. * `cr-lf`: Line endings will be converted to `\\r\\n`. The default line ending on Windows. * `native`: Line endings will be converted to `\\n` on Unix and `\\r\\n` on Windows.", + "description": "The character Ruff uses at the end of a line.\n\n* `auto`: The newline style is detected automatically on a file per file basis. Files with mixed line endings will be converted to the first detected line ending. Defaults to `\\n` for files that contain no line endings.\n* `lf`: Line endings will be converted to `\\n`. The default line ending on Unix.\n* `cr-lf`: Line endings will be converted to `\\r\\n`. The default line ending on Windows.\n* `native`: Line endings will be converted to `\\n` on Unix and `\\r\\n` on Windows.", "anyOf": [ { - "$ref": "#/definitions/LineEnding" + "$ref": "#/$defs/LineEnding" }, { "type": "null" @@ -1586,10 +1576,10 @@ ] }, "quote-style": { - "description": "Configures the preferred quote character for strings. The recommended options are\n\n* `double` (default): Use double quotes `\"` * `single`: Use single quotes `'`\n\nIn compliance with [PEP 8](https://peps.python.org/pep-0008/) and [PEP 257](https://peps.python.org/pep-0257/), Ruff prefers double quotes for triple quoted strings and docstrings even when using `quote-style = \"single\"`.\n\nRuff deviates from using the configured quotes if doing so prevents the need for escaping quote characters inside the string:\n\n```python a = \"a string without any quotes\" b = \"It's monday morning\" ```\n\nRuff will change the quotes of the string assigned to `a` to single quotes when using `quote-style = \"single\"`. However, Ruff uses double quotes for the string assigned to `b` because using single quotes would require escaping the `'`, which leads to the less readable code: `'It\\'s monday morning'`.\n\nIn addition, Ruff supports the quote style `preserve` for projects that already use a mixture of single and double quotes and can't migrate to the `double` or `single` style. The quote style `preserve` leaves the quotes of all strings unchanged.", + "description": "Configures the preferred quote character for strings. 
The recommended options are\n\n* `double` (default): Use double quotes `\"`\n* `single`: Use single quotes `'`\n\nIn compliance with [PEP 8](https://peps.python.org/pep-0008/) and [PEP 257](https://peps.python.org/pep-0257/),\nRuff prefers double quotes for triple quoted strings and docstrings even when using `quote-style = \"single\"`.\n\nRuff deviates from using the configured quotes if doing so prevents the need for\nescaping quote characters inside the string:\n\n```python\na = \"a string without any quotes\"\nb = \"It's monday morning\"\n```\n\nRuff will change the quotes of the string assigned to `a` to single quotes when using `quote-style = \"single\"`.\nHowever, Ruff uses double quotes for the string assigned to `b` because using single quotes would require escaping the `'`,\nwhich leads to the less readable code: `'It\\'s monday morning'`.\n\nIn addition, Ruff supports the quote style `preserve` for projects that already use\na mixture of single and double quotes and can't migrate to the `double` or `single` style.\nThe quote style `preserve` leaves the quotes of all strings unchanged.", "anyOf": [ { - "$ref": "#/definitions/QuoteStyle" + "$ref": "#/$defs/QuoteStyle" }, { "type": "null" @@ -1597,7 +1587,7 @@ ] }, "skip-magic-trailing-comma": { - "description": "Ruff uses existing trailing commas as an indication that short lines should be left separate. If this option is set to `true`, the magic trailing comma is ignored.\n\nFor example, Ruff leaves the arguments separate even though collapsing the arguments to a single line doesn't exceed the line length if `skip-magic-trailing-comma = false`:\n\n```python # The arguments remain on separate lines because of the trailing comma after `b` def test( a, b, ): pass ```\n\nSetting `skip-magic-trailing-comma = true` changes the formatting to:\n\n```python # The arguments are collapsed to a single line because the trailing comma is ignored def test(a, b): pass ```", + "description": "Ruff uses existing trailing commas as an indication that short lines should be left separate.\nIf this option is set to `true`, the magic trailing comma is ignored.\n\nFor example, Ruff leaves the arguments separate even though\ncollapsing the arguments to a single line doesn't exceed the line length if `skip-magic-trailing-comma = false`:\n\n```python\n# The arguments remain on separate lines because of the trailing comma after `b`\ndef test(\n a,\n b,\n): pass\n```\n\nSetting `skip-magic-trailing-comma = true` changes the formatting to:\n\n```python\n# The arguments are collapsed to a single line because the trailing comma is ignored\ndef test(a, b):\n pass\n```", "type": [ "boolean", "null" @@ -1609,7 +1599,7 @@ "ImportSection": { "anyOf": [ { - "$ref": "#/definitions/ImportType" + "$ref": "#/$defs/ImportType" }, { "type": "string" @@ -1631,16 +1621,12 @@ { "description": "Use tabs to indent code.", "type": "string", - "enum": [ - "tab" - ] + "const": "tab" }, { "description": "Use [`IndentWidth`] spaces to indent code.", "type": "string", - "enum": [ - "space" - ] + "const": "space" } ] }, @@ -1648,21 +1634,22 @@ "description": "The size of a tab.", "type": "integer", "format": "uint8", - "minimum": 1.0 + "maximum": 255, + "minimum": 1 }, "IsortOptions": { "description": "Options for the `isort` plugin.", "type": "object", "properties": { "case-sensitive": { - "description": "Sort imports taking into account case sensitivity.\n\nNote that the [`order-by-type`](#lint_isort_order-by-type) setting will take precedence over this one when enabled.", + 
"description": "Sort imports taking into account case sensitivity.\n\nNote that the [`order-by-type`](#lint_isort_order-by-type) setting will\ntake precedence over this one when enabled.", "type": [ "boolean", "null" ] }, "classes": { - "description": "An override list of tokens to always recognize as a Class for [`order-by-type`](#lint_isort_order-by-type) regardless of casing.", + "description": "An override list of tokens to always recognize as a Class for\n[`order-by-type`](#lint_isort_order-by-type) regardless of casing.", "type": [ "array", "null" @@ -1672,14 +1659,14 @@ } }, "combine-as-imports": { - "description": "Combines as imports on the same line. See isort's [`combine-as-imports`](https://pycqa.github.io/isort/docs/configuration/options.html#combine-as-imports) option.", + "description": "Combines as imports on the same line. See isort's [`combine-as-imports`](https://pycqa.github.io/isort/docs/configuration/options.html#combine-as-imports)\noption.", "type": [ "boolean", "null" ] }, "constants": { - "description": "An override list of tokens to always recognize as a CONSTANT for [`order-by-type`](#lint_isort_order-by-type) regardless of casing.", + "description": "An override list of tokens to always recognize as a CONSTANT\nfor [`order-by-type`](#lint_isort_order-by-type) regardless of casing.", "type": [ "array", "null" @@ -1692,7 +1679,7 @@ "description": "Define a default section for any imports that don't fit into the specified [`section-order`](#lint_isort_section-order).", "anyOf": [ { - "$ref": "#/definitions/ImportSection" + "$ref": "#/$defs/ImportSection" }, { "type": "null" @@ -1700,14 +1687,14 @@ ] }, "detect-same-package": { - "description": "Whether to automatically mark imports from within the same package as first-party. For example, when `detect-same-package = true`, then when analyzing files within the `foo` package, any imports from within the `foo` package will be considered first-party.\n\nThis heuristic is often unnecessary when `src` is configured to detect all first-party sources; however, if `src` is _not_ configured, this heuristic can be useful to detect first-party imports from _within_ (but not _across_) first-party packages.", + "description": "Whether to automatically mark imports from within the same package as first-party.\nFor example, when `detect-same-package = true`, then when analyzing files within the\n`foo` package, any imports from within the `foo` package will be considered first-party.\n\nThis heuristic is often unnecessary when `src` is configured to detect all first-party\nsources; however, if `src` is _not_ configured, this heuristic can be useful to detect\nfirst-party imports from _within_ (but not _across_) first-party packages.", "type": [ "boolean", "null" ] }, "extra-standard-library": { - "description": "A list of modules to consider standard-library, in addition to those known to Ruff in advance.\n\nSupports glob patterns. For more information on the glob syntax, refer to the [`globset` documentation](https://docs.rs/globset/latest/globset/#syntax).", + "description": "A list of modules to consider standard-library, in addition to those\nknown to Ruff in advance.\n\nSupports glob patterns. 
For more information on the glob syntax, refer\nto the [`globset` documentation](https://docs.rs/globset/latest/globset/#syntax).", "type": [ "array", "null" @@ -1724,7 +1711,7 @@ ] }, "force-sort-within-sections": { - "description": "Don't sort straight-style imports (like `import sys`) before from-style imports (like `from itertools import groupby`). Instead, sort the imports by module, independent of import style.", + "description": "Don't sort straight-style imports (like `import sys`) before from-style\nimports (like `from itertools import groupby`). Instead, sort the\nimports by module, independent of import style.", "type": [ "boolean", "null" @@ -1741,14 +1728,14 @@ } }, "force-wrap-aliases": { - "description": "Force `import from` statements with multiple members and at least one alias (e.g., `import A as B`) to wrap such that every line contains exactly one member. For example, this formatting would be retained, rather than condensing to a single line:\n\n```python from .utils import ( test_directory as test_directory, test_id as test_id ) ```\n\nNote that this setting is only effective when combined with `combine-as-imports = true`. When [`combine-as-imports`](#lint_isort_combine-as-imports) isn't enabled, every aliased `import from` will be given its own line, in which case, wrapping is not necessary.\n\nWhen using the formatter, ensure that [`format.skip-magic-trailing-comma`](#format_skip-magic-trailing-comma) is set to `false` (default) when enabling `force-wrap-aliases` to avoid that the formatter collapses members if they all fit on a single line.", + "description": "Force `import from` statements with multiple members and at least one\nalias (e.g., `import A as B`) to wrap such that every line contains\nexactly one member. For example, this formatting would be retained,\nrather than condensing to a single line:\n\n```python\nfrom .utils import (\n test_directory as test_directory,\n test_id as test_id\n)\n```\n\nNote that this setting is only effective when combined with\n`combine-as-imports = true`. 
When [`combine-as-imports`](#lint_isort_combine-as-imports) isn't\nenabled, every aliased `import from` will be given its own line, in\nwhich case, wrapping is not necessary.\n\nWhen using the formatter, ensure that [`format.skip-magic-trailing-comma`](#format_skip-magic-trailing-comma) is set to `false` (default)\nwhen enabling `force-wrap-aliases` to avoid that the formatter collapses members if they all fit on a single line.", "type": [ "boolean", "null" ] }, "forced-separate": { - "description": "A list of modules to separate into auxiliary block(s) of imports, in the order specified.", + "description": "A list of modules to separate into auxiliary block(s) of imports,\nin the order specified.", "type": [ "array", "null" @@ -1758,14 +1745,14 @@ } }, "from-first": { - "description": "Whether to place `import from` imports before straight imports when sorting.\n\nFor example, by default, imports will be sorted such that straight imports appear before `import from` imports, as in: ```python import os import sys from typing import List ```\n\nSetting `from-first = true` will instead sort such that `import from` imports appear before straight imports, as in: ```python from typing import List import os import sys ```", + "description": "Whether to place `import from` imports before straight imports when sorting.\n\nFor example, by default, imports will be sorted such that straight imports appear\nbefore `import from` imports, as in:\n```python\nimport os\nimport sys\nfrom typing import List\n```\n\nSetting `from-first = true` will instead sort such that `import from` imports appear\nbefore straight imports, as in:\n```python\nfrom typing import List\nimport os\nimport sys\n```", "type": [ "boolean", "null" ] }, "known-first-party": { - "description": "A list of modules to consider first-party, regardless of whether they can be identified as such via introspection of the local filesystem.\n\nSupports glob patterns. For more information on the glob syntax, refer to the [`globset` documentation](https://docs.rs/globset/latest/globset/#syntax).", + "description": "A list of modules to consider first-party, regardless of whether they\ncan be identified as such via introspection of the local filesystem.\n\nSupports glob patterns. For more information on the glob syntax, refer\nto the [`globset` documentation](https://docs.rs/globset/latest/globset/#syntax).", "type": [ "array", "null" @@ -1775,7 +1762,7 @@ } }, "known-local-folder": { - "description": "A list of modules to consider being a local folder. Generally, this is reserved for relative imports (`from . import module`).\n\nSupports glob patterns. For more information on the glob syntax, refer to the [`globset` documentation](https://docs.rs/globset/latest/globset/#syntax).", + "description": "A list of modules to consider being a local folder.\nGenerally, this is reserved for relative imports (`from . import module`).\n\nSupports glob patterns. For more information on the glob syntax, refer\nto the [`globset` documentation](https://docs.rs/globset/latest/globset/#syntax).", "type": [ "array", "null" @@ -1785,7 +1772,7 @@ } }, "known-third-party": { - "description": "A list of modules to consider third-party, regardless of whether they can be identified as such via introspection of the local filesystem.\n\nSupports glob patterns. 
For more information on the glob syntax, refer to the [`globset` documentation](https://docs.rs/globset/latest/globset/#syntax).", + "description": "A list of modules to consider third-party, regardless of whether they\ncan be identified as such via introspection of the local filesystem.\n\nSupports glob patterns. For more information on the glob syntax, refer\nto the [`globset` documentation](https://docs.rs/globset/latest/globset/#syntax).", "type": [ "array", "null" @@ -1795,21 +1782,21 @@ } }, "length-sort": { - "description": "Sort imports by their string length, such that shorter imports appear before longer imports. For example, by default, imports will be sorted alphabetically, as in: ```python import collections import os ```\n\nSetting `length-sort = true` will instead sort such that shorter imports appear before longer imports, as in: ```python import os import collections ```", + "description": "Sort imports by their string length, such that shorter imports appear\nbefore longer imports. For example, by default, imports will be sorted\nalphabetically, as in:\n```python\nimport collections\nimport os\n```\n\nSetting `length-sort = true` will instead sort such that shorter imports\nappear before longer imports, as in:\n```python\nimport os\nimport collections\n```", "type": [ "boolean", "null" ] }, "length-sort-straight": { - "description": "Sort straight imports by their string length. Similar to [`length-sort`](#lint_isort_length-sort), but applies only to straight imports and doesn't affect `from` imports.", + "description": "Sort straight imports by their string length. Similar to [`length-sort`](#lint_isort_length-sort),\nbut applies only to straight imports and doesn't affect `from` imports.", "type": [ "boolean", "null" ] }, "lines-after-imports": { - "description": "The number of blank lines to place after imports. 
Use `-1` for automatic determination.\n\nRuff uses at most one blank line after imports in typing stub files (files with `.pyi` extension) in accordance to the typing style recommendations ([source](https://typing.python.org/en/latest/guides/writing_stubs.html#blank-lines)).\n\nWhen using the formatter, only the values `-1`, `1`, and `2` are compatible because it enforces at least one empty and at most two empty lines after imports.", + "description": "The number of blank lines to place after imports.\nUse `-1` for automatic determination.\n\nRuff uses at most one blank line after imports in typing stub files (files with `.pyi` extension) in accordance to\nthe typing style recommendations ([source](https://typing.python.org/en/latest/guides/writing_stubs.html#blank-lines)).\n\nWhen using the formatter, only the values `-1`, `1`, and `2` are compatible because\nit enforces at least one empty and at most two empty lines after imports.", "type": [ "integer", "null" @@ -1817,43 +1804,43 @@ "format": "int" }, "lines-between-types": { - "description": "The number of lines to place between \"direct\" and `import from` imports.\n\nWhen using the formatter, only the values `0` and `1` are compatible because it preserves up to one empty line after imports in nested blocks.", + "description": "The number of lines to place between \"direct\" and `import from` imports.\n\nWhen using the formatter, only the values `0` and `1` are compatible because\nit preserves up to one empty line after imports in nested blocks.", "type": [ "integer", "null" ], "format": "uint", - "minimum": 0.0 + "minimum": 0 }, "no-lines-before": { - "description": "A list of sections that should _not_ be delineated from the previous section via empty lines.", + "description": "A list of sections that should _not_ be delineated from the previous\nsection via empty lines.", "type": [ "array", "null" ], "items": { - "$ref": "#/definitions/ImportSection" + "$ref": "#/$defs/ImportSection" } }, "no-sections": { - "description": "Put all imports into the same section bucket.\n\nFor example, rather than separating standard library and third-party imports, as in: ```python import os import sys\n\nimport numpy import pandas ```\n\nSetting `no-sections = true` will instead group all imports into a single section: ```python import numpy import os import pandas import sys ```", + "description": "Put all imports into the same section bucket.\n\nFor example, rather than separating standard library and third-party imports, as in:\n```python\nimport os\nimport sys\n\nimport numpy\nimport pandas\n```\n\nSetting `no-sections = true` will instead group all imports into a single section:\n```python\nimport numpy\nimport os\nimport pandas\nimport sys\n```", "type": [ "boolean", "null" ] }, "order-by-type": { - "description": "Order imports by type, which is determined by case, in addition to alphabetically.\n\nNote that this option takes precedence over the [`case-sensitive`](#lint_isort_case-sensitive) setting when enabled.", + "description": "Order imports by type, which is determined by case, in addition to\nalphabetically.\n\nNote that this option takes precedence over the\n[`case-sensitive`](#lint_isort_case-sensitive) setting when enabled.", "type": [ "boolean", "null" ] }, "relative-imports-order": { - "description": "Whether to place \"closer\" imports (fewer `.` characters, most local) before \"further\" imports (more `.` characters, least local), or vice versa.\n\nThe default (\"furthest-to-closest\") is equivalent to isort's 
[`reverse-relative`](https://pycqa.github.io/isort/docs/configuration/options.html#reverse-relative) default (`reverse-relative = false`); setting this to \"closest-to-furthest\" is equivalent to isort's `reverse-relative = true`.", + "description": "Whether to place \"closer\" imports (fewer `.` characters, most local)\nbefore \"further\" imports (more `.` characters, least local), or vice\nversa.\n\nThe default (\"furthest-to-closest\") is equivalent to isort's\n[`reverse-relative`](https://pycqa.github.io/isort/docs/configuration/options.html#reverse-relative) default (`reverse-relative = false`); setting\nthis to \"closest-to-furthest\" is equivalent to isort's\n`reverse-relative = true`.", "anyOf": [ { - "$ref": "#/definitions/RelativeImportsOrder" + "$ref": "#/$defs/RelativeImportsOrder" }, { "type": "null" @@ -1867,7 +1854,7 @@ "null" ], "items": { - "$ref": "#/definitions/NameImports" + "$ref": "#/$defs/NameImports" } }, "section-order": { @@ -1877,11 +1864,11 @@ "null" ], "items": { - "$ref": "#/definitions/ImportSection" + "$ref": "#/$defs/ImportSection" } }, "sections": { - "description": "A list of mappings from section names to modules.\n\nBy default, imports are categorized according to their type (e.g., `future`, `third-party`, and so on). This setting allows you to group modules into custom sections, to augment or override the built-in sections.\n\nFor example, to group all testing utilities, you could create a `testing` section: ```toml testing = [\"pytest\", \"hypothesis\"] ```\n\nThe values in the list are treated as glob patterns. For example, to match all packages in the LangChain ecosystem (`langchain-core`, `langchain-openai`, etc.): ```toml langchain = [\"langchain-*\"] ```\n\nCustom sections should typically be inserted into the [`section-order`](#lint_isort_section-order) list to ensure that they're displayed as a standalone group and in the intended order, as in: ```toml section-order = [ \"future\", \"standard-library\", \"third-party\", \"first-party\", \"local-folder\", \"testing\" ] ```\n\nIf a custom section is omitted from [`section-order`](#lint_isort_section-order), imports in that section will be assigned to the [`default-section`](#lint_isort_default-section) (which defaults to `third-party`).", + "description": "A list of mappings from section names to modules.\n\nBy default, imports are categorized according to their type (e.g., `future`, `third-party`,\nand so on). This setting allows you to group modules into custom sections, to augment or\noverride the built-in sections.\n\nFor example, to group all testing utilities, you could create a `testing` section:\n```toml\ntesting = [\"pytest\", \"hypothesis\"]\n```\n\nThe values in the list are treated as glob patterns. 
For example, to match all packages in\nthe LangChain ecosystem (`langchain-core`, `langchain-openai`, etc.):\n```toml\nlangchain = [\"langchain-*\"]\n```\n\nCustom sections should typically be inserted into the [`section-order`](#lint_isort_section-order) list to ensure that\nthey're displayed as a standalone group and in the intended order, as in:\n```toml\nsection-order = [\n \"future\",\n \"standard-library\",\n \"third-party\",\n \"first-party\",\n \"local-folder\",\n \"testing\"\n]\n```\n\nIf a custom section is omitted from [`section-order`](#lint_isort_section-order), imports in that section will be\nassigned to the [`default-section`](#lint_isort_default-section) (which defaults to `third-party`).", "type": [ "object", "null" @@ -1904,14 +1891,14 @@ } }, "split-on-trailing-comma": { - "description": "If a comma is placed after the last member in a multi-line import, then the imports will never be folded into one line.\n\nSee isort's [`split-on-trailing-comma`](https://pycqa.github.io/isort/docs/configuration/options.html#split-on-trailing-comma) option.\n\nWhen using the formatter, ensure that [`format.skip-magic-trailing-comma`](#format_skip-magic-trailing-comma) is set to `false` (default) when enabling `split-on-trailing-comma` to avoid that the formatter removes the trailing commas.", + "description": "If a comma is placed after the last member in a multi-line import, then\nthe imports will never be folded into one line.\n\nSee isort's [`split-on-trailing-comma`](https://pycqa.github.io/isort/docs/configuration/options.html#split-on-trailing-comma) option.\n\nWhen using the formatter, ensure that [`format.skip-magic-trailing-comma`](#format_skip-magic-trailing-comma) is set to `false` (default) when enabling `split-on-trailing-comma`\nto avoid that the formatter removes the trailing commas.", "type": [ "boolean", "null" ] }, "variables": { - "description": "An override list of tokens to always recognize as a var for [`order-by-type`](#lint_isort_order-by-type) regardless of casing.", + "description": "An override list of tokens to always recognize as a var\nfor [`order-by-type`](#lint_isort_order-by-type) regardless of casing.", "type": [ "array", "null" @@ -1926,32 +1913,24 @@ "LineEnding": { "oneOf": [ { - "description": "The newline style is detected automatically on a file per file basis. Files with mixed line endings will be converted to the first detected line ending. 
Defaults to [`LineEnding::Lf`] for a files that contain no line endings.", + "description": "The newline style is detected automatically on a file per file basis.\nFiles with mixed line endings will be converted to the first detected line ending.\nDefaults to [`LineEnding::Lf`] for a files that contain no line endings.", "type": "string", - "enum": [ - "auto" - ] + "const": "auto" }, { "description": "Line endings will be converted to `\\n` as is common on Unix.", "type": "string", - "enum": [ - "lf" - ] + "const": "lf" }, { "description": "Line endings will be converted to `\\r\\n` as is common on Windows.", "type": "string", - "enum": [ - "cr-lf" - ] + "const": "cr-lf" }, { "description": "Line endings will be converted to `\\n` on Unix and `\\r\\n` on Windows.", "type": "string", - "enum": [ - "native" - ] + "const": "native" } ] }, @@ -1959,21 +1938,22 @@ "description": "The length of a line of text that is considered too long.\n\nThe allowed range of values is 1..=320", "type": "integer", "format": "uint16", - "maximum": 320.0, - "minimum": 1.0 + "maximum": 320, + "minimum": 1 }, "LineWidth": { "description": "The maximum visual width to which the formatter should try to limit a line.", "type": "integer", "format": "uint16", - "minimum": 1.0 + "maximum": 65535, + "minimum": 1 }, "LintOptions": { "description": "Configures how Ruff checks your code.\n\nOptions specified in the `lint` section take precedence over the deprecated top-level settings.", "type": "object", "properties": { "allowed-confusables": { - "description": "A list of allowed \"confusable\" Unicode characters to ignore when enforcing `RUF001`, `RUF002`, and `RUF003`.", + "description": "A list of allowed \"confusable\" Unicode characters to ignore when\nenforcing `RUF001`, `RUF002`, and `RUF003`.", "type": [ "array", "null" @@ -1985,14 +1965,14 @@ } }, "dummy-variable-rgx": { - "description": "A regular expression used to identify \"dummy\" variables, or those which should be ignored when enforcing (e.g.) unused-variable rules. The default expression matches `_`, `__`, and `_var`, but not `_var_`.", + "description": "A regular expression used to identify \"dummy\" variables, or those which\nshould be ignored when enforcing (e.g.) unused-variable rules. The\ndefault expression matches `_`, `__`, and `_var`, but not `_var_`.", "type": [ "string", "null" ] }, "exclude": { - "description": "A list of file patterns to exclude from linting in addition to the files excluded globally (see [`exclude`](#exclude), and [`extend-exclude`](#extend-exclude)).\n\nExclusions are based on globs, and can be either:\n\n- Single-path patterns, like `.mypy_cache` (to exclude any directory named `.mypy_cache` in the tree), `foo.py` (to exclude any file named `foo.py`), or `foo_*.py` (to exclude any file matching `foo_*.py` ). - Relative patterns, like `directory/foo.py` (to exclude that specific file) or `directory/*.py` (to exclude any Python files in `directory`). 
Note that these paths are relative to the project root (e.g., the directory containing your `pyproject.toml`).\n\nFor more information on the glob syntax, refer to the [`globset` documentation](https://docs.rs/globset/latest/globset/#syntax).", + "description": "A list of file patterns to exclude from linting in addition to the files excluded globally (see [`exclude`](#exclude), and [`extend-exclude`](#extend-exclude)).\n\nExclusions are based on globs, and can be either:\n\n- Single-path patterns, like `.mypy_cache` (to exclude any directory\n named `.mypy_cache` in the tree), `foo.py` (to exclude any file named\n `foo.py`), or `foo_*.py` (to exclude any file matching `foo_*.py` ).\n- Relative patterns, like `directory/foo.py` (to exclude that specific\n file) or `directory/*.py` (to exclude any Python files in\n `directory`). Note that these paths are relative to the project root\n (e.g., the directory containing your `pyproject.toml`).\n\nFor more information on the glob syntax, refer to the [`globset` documentation](https://docs.rs/globset/latest/globset/#syntax).", "type": [ "array", "null" @@ -2002,35 +1982,35 @@ } }, "explicit-preview-rules": { - "description": "Whether to require exact codes to select preview rules. When enabled, preview rules will not be selected by prefixes — the full code of each preview rule will be required to enable the rule.", + "description": "Whether to require exact codes to select preview rules. When enabled,\npreview rules will not be selected by prefixes — the full code of each\npreview rule will be required to enable the rule.", "type": [ "boolean", "null" ] }, "extend-fixable": { - "description": "A list of rule codes or prefixes to consider fixable, in addition to those specified by [`fixable`](#lint_fixable).", + "description": "A list of rule codes or prefixes to consider fixable, in addition to those\nspecified by [`fixable`](#lint_fixable).", "type": [ "array", "null" ], "items": { - "$ref": "#/definitions/RuleSelector" + "$ref": "#/$defs/RuleSelector" } }, "extend-ignore": { - "description": "A list of rule codes or prefixes to ignore, in addition to those specified by `ignore`.", - "deprecated": true, + "description": "A list of rule codes or prefixes to ignore, in addition to those\nspecified by `ignore`.", "type": [ "array", "null" ], + "deprecated": true, "items": { - "$ref": "#/definitions/RuleSelector" + "$ref": "#/$defs/RuleSelector" } }, "extend-per-file-ignores": { - "description": "A list of mappings from file pattern to rule codes or prefixes to exclude, in addition to any rules excluded by [`per-file-ignores`](#lint_per-file-ignores).", + "description": "A list of mappings from file pattern to rule codes or prefixes to\nexclude, in addition to any rules excluded by [`per-file-ignores`](#lint_per-file-ignores).", "type": [ "object", "null" @@ -2038,53 +2018,53 @@ "additionalProperties": { "type": "array", "items": { - "$ref": "#/definitions/RuleSelector" + "$ref": "#/$defs/RuleSelector" } } }, "extend-safe-fixes": { - "description": "A list of rule codes or prefixes for which unsafe fixes should be considered safe.", + "description": "A list of rule codes or prefixes for which unsafe fixes should be considered\nsafe.", "type": [ "array", "null" ], "items": { - "$ref": "#/definitions/RuleSelector" + "$ref": "#/$defs/RuleSelector" } }, "extend-select": { - "description": "A list of rule codes or prefixes to enable, in addition to those specified by [`select`](#lint_select).", + "description": "A list of rule codes or prefixes to 
enable, in addition to those\nspecified by [`select`](#lint_select).", "type": [ "array", "null" ], "items": { - "$ref": "#/definitions/RuleSelector" + "$ref": "#/$defs/RuleSelector" } }, "extend-unfixable": { - "description": "A list of rule codes or prefixes to consider non-auto-fixable, in addition to those specified by [`unfixable`](#lint_unfixable).", - "deprecated": true, + "description": "A list of rule codes or prefixes to consider non-auto-fixable, in addition to those\nspecified by [`unfixable`](#lint_unfixable).", "type": [ "array", "null" ], + "deprecated": true, "items": { - "$ref": "#/definitions/RuleSelector" + "$ref": "#/$defs/RuleSelector" } }, "extend-unsafe-fixes": { - "description": "A list of rule codes or prefixes for which safe fixes should be considered unsafe.", + "description": "A list of rule codes or prefixes for which safe fixes should be considered\nunsafe.", "type": [ "array", "null" ], "items": { - "$ref": "#/definitions/RuleSelector" + "$ref": "#/$defs/RuleSelector" } }, "external": { - "description": "A list of rule codes or prefixes that are unsupported by Ruff, but should be preserved when (e.g.) validating `# noqa` directives. Useful for retaining `# noqa` directives that cover plugins not yet implemented by Ruff.", + "description": "A list of rule codes or prefixes that are unsupported by Ruff, but should be\npreserved when (e.g.) validating `# noqa` directives. Useful for\nretaining `# noqa` directives that cover plugins not yet implemented\nby Ruff.", "type": [ "array", "null" @@ -2094,20 +2074,20 @@ } }, "fixable": { - "description": "A list of rule codes or prefixes to consider fixable. By default, all rules are considered fixable.", + "description": "A list of rule codes or prefixes to consider fixable. By default,\nall rules are considered fixable.", "type": [ "array", "null" ], "items": { - "$ref": "#/definitions/RuleSelector" + "$ref": "#/$defs/RuleSelector" } }, "flake8-annotations": { "description": "Options for the `flake8-annotations` plugin.", "anyOf": [ { - "$ref": "#/definitions/Flake8AnnotationsOptions" + "$ref": "#/$defs/Flake8AnnotationsOptions" }, { "type": "null" @@ -2118,7 +2098,7 @@ "description": "Options for the `flake8-bandit` plugin.", "anyOf": [ { - "$ref": "#/definitions/Flake8BanditOptions" + "$ref": "#/$defs/Flake8BanditOptions" }, { "type": "null" @@ -2129,7 +2109,7 @@ "description": "Options for the `flake8-boolean-trap` plugin.", "anyOf": [ { - "$ref": "#/definitions/Flake8BooleanTrapOptions" + "$ref": "#/$defs/Flake8BooleanTrapOptions" }, { "type": "null" @@ -2140,7 +2120,7 @@ "description": "Options for the `flake8-bugbear` plugin.", "anyOf": [ { - "$ref": "#/definitions/Flake8BugbearOptions" + "$ref": "#/$defs/Flake8BugbearOptions" }, { "type": "null" @@ -2151,7 +2131,7 @@ "description": "Options for the `flake8-builtins` plugin.", "anyOf": [ { - "$ref": "#/definitions/Flake8BuiltinsOptions" + "$ref": "#/$defs/Flake8BuiltinsOptions" }, { "type": "null" @@ -2162,7 +2142,7 @@ "description": "Options for the `flake8-comprehensions` plugin.", "anyOf": [ { - "$ref": "#/definitions/Flake8ComprehensionsOptions" + "$ref": "#/$defs/Flake8ComprehensionsOptions" }, { "type": "null" @@ -2173,7 +2153,7 @@ "description": "Options for the `flake8-copyright` plugin.", "anyOf": [ { - "$ref": "#/definitions/Flake8CopyrightOptions" + "$ref": "#/$defs/Flake8CopyrightOptions" }, { "type": "null" @@ -2184,7 +2164,7 @@ "description": "Options for the `flake8-errmsg` plugin.", "anyOf": [ { - "$ref": "#/definitions/Flake8ErrMsgOptions" + 
"$ref": "#/$defs/Flake8ErrMsgOptions" }, { "type": "null" @@ -2195,7 +2175,7 @@ "description": "Options for the `flake8-gettext` plugin.", "anyOf": [ { - "$ref": "#/definitions/Flake8GetTextOptions" + "$ref": "#/$defs/Flake8GetTextOptions" }, { "type": "null" @@ -2206,7 +2186,7 @@ "description": "Options for the `flake8-implicit-str-concat` plugin.", "anyOf": [ { - "$ref": "#/definitions/Flake8ImplicitStrConcatOptions" + "$ref": "#/$defs/Flake8ImplicitStrConcatOptions" }, { "type": "null" @@ -2217,7 +2197,7 @@ "description": "Options for the `flake8-import-conventions` plugin.", "anyOf": [ { - "$ref": "#/definitions/Flake8ImportConventionsOptions" + "$ref": "#/$defs/Flake8ImportConventionsOptions" }, { "type": "null" @@ -2228,7 +2208,7 @@ "description": "Options for the `flake8-pytest-style` plugin.", "anyOf": [ { - "$ref": "#/definitions/Flake8PytestStyleOptions" + "$ref": "#/$defs/Flake8PytestStyleOptions" }, { "type": "null" @@ -2239,7 +2219,7 @@ "description": "Options for the `flake8-quotes` plugin.", "anyOf": [ { - "$ref": "#/definitions/Flake8QuotesOptions" + "$ref": "#/$defs/Flake8QuotesOptions" }, { "type": "null" @@ -2250,7 +2230,7 @@ "description": "Options for the `flake8_self` plugin.", "anyOf": [ { - "$ref": "#/definitions/Flake8SelfOptions" + "$ref": "#/$defs/Flake8SelfOptions" }, { "type": "null" @@ -2261,7 +2241,7 @@ "description": "Options for the `flake8-tidy-imports` plugin.", "anyOf": [ { - "$ref": "#/definitions/Flake8TidyImportsOptions" + "$ref": "#/$defs/Flake8TidyImportsOptions" }, { "type": "null" @@ -2272,7 +2252,7 @@ "description": "Options for the `flake8-type-checking` plugin.", "anyOf": [ { - "$ref": "#/definitions/Flake8TypeCheckingOptions" + "$ref": "#/$defs/Flake8TypeCheckingOptions" }, { "type": "null" @@ -2283,7 +2263,7 @@ "description": "Options for the `flake8-unused-arguments` plugin.", "anyOf": [ { - "$ref": "#/definitions/Flake8UnusedArgumentsOptions" + "$ref": "#/$defs/Flake8UnusedArgumentsOptions" }, { "type": "null" @@ -2291,35 +2271,35 @@ ] }, "future-annotations": { - "description": "Whether to allow rules to add `from __future__ import annotations` in cases where this would simplify a fix or enable a new diagnostic.\n\nFor example, `TC001`, `TC002`, and `TC003` can move more imports into `TYPE_CHECKING` blocks if `__future__` annotations are enabled.", + "description": "Whether to allow rules to add `from __future__ import annotations` in cases where this would\nsimplify a fix or enable a new diagnostic.\n\nFor example, `TC001`, `TC002`, and `TC003` can move more imports into `TYPE_CHECKING` blocks\nif `__future__` annotations are enabled.", "type": [ "boolean", "null" ] }, "ignore": { - "description": "A list of rule codes or prefixes to ignore. Prefixes can specify exact rules (like `F841`), entire categories (like `F`), or anything in between.\n\nWhen breaking ties between enabled and disabled rules (via `select` and `ignore`, respectively), more specific prefixes override less specific prefixes. `ignore` takes precedence over `select` if the same prefix appears in both.", + "description": "A list of rule codes or prefixes to ignore. Prefixes can specify exact\nrules (like `F841`), entire categories (like `F`), or anything in\nbetween.\n\nWhen breaking ties between enabled and disabled rules (via `select` and\n`ignore`, respectively), more specific prefixes override less\nspecific prefixes. 
`ignore` takes precedence over `select` if the same\nprefix appears in both.", "type": [ "array", "null" ], "items": { - "$ref": "#/definitions/RuleSelector" + "$ref": "#/$defs/RuleSelector" } }, "ignore-init-module-imports": { - "description": "Avoid automatically removing unused imports in `__init__.py` files. Such imports will still be flagged, but with a dedicated message suggesting that the import is either added to the module's `__all__` symbol, or re-exported with a redundant alias (e.g., `import os as os`).\n\nThis option is enabled by default, but you can opt-in to removal of imports via an unsafe fix.", - "deprecated": true, + "description": "Avoid automatically removing unused imports in `__init__.py` files. Such\nimports will still be flagged, but with a dedicated message suggesting\nthat the import is either added to the module's `__all__` symbol, or\nre-exported with a redundant alias (e.g., `import os as os`).\n\nThis option is enabled by default, but you can opt-in to removal of imports\nvia an unsafe fix.", "type": [ "boolean", "null" - ] + ], + "deprecated": true }, "isort": { "description": "Options for the `isort` plugin.", "anyOf": [ { - "$ref": "#/definitions/IsortOptions" + "$ref": "#/$defs/IsortOptions" }, { "type": "null" @@ -2327,7 +2307,7 @@ ] }, "logger-objects": { - "description": "A list of objects that should be treated equivalently to a `logging.Logger` object.\n\nThis is useful for ensuring proper diagnostics (e.g., to identify `logging` deprecations and other best-practices) for projects that re-export a `logging.Logger` object from a common module.\n\nFor example, if you have a module `logging_setup.py` with the following contents: ```python import logging\n\nlogger = logging.getLogger(__name__) ```\n\nAdding `\"logging_setup.logger\"` to `logger-objects` will ensure that `logging_setup.logger` is treated as a `logging.Logger` object when imported from other modules (e.g., `from logging_setup import logger`).", + "description": "A list of objects that should be treated equivalently to a\n`logging.Logger` object.\n\nThis is useful for ensuring proper diagnostics (e.g., to identify\n`logging` deprecations and other best-practices) for projects that\nre-export a `logging.Logger` object from a common module.\n\nFor example, if you have a module `logging_setup.py` with the following\ncontents:\n```python\nimport logging\n\nlogger = logging.getLogger(__name__)\n```\n\nAdding `\"logging_setup.logger\"` to `logger-objects` will ensure that\n`logging_setup.logger` is treated as a `logging.Logger` object when\nimported from other modules (e.g., `from logging_setup import logger`).", "type": [ "array", "null" @@ -2340,7 +2320,7 @@ "description": "Options for the `mccabe` plugin.", "anyOf": [ { - "$ref": "#/definitions/McCabeOptions" + "$ref": "#/$defs/McCabeOptions" }, { "type": "null" @@ -2351,7 +2331,7 @@ "description": "Options for the `pep8-naming` plugin.", "anyOf": [ { - "$ref": "#/definitions/Pep8NamingOptions" + "$ref": "#/$defs/Pep8NamingOptions" }, { "type": "null" @@ -2359,7 +2339,7 @@ ] }, "per-file-ignores": { - "description": "A list of mappings from file pattern to rule codes or prefixes to exclude, when considering any matching files. An initial '!' negates the file pattern.", + "description": "A list of mappings from file pattern to rule codes or prefixes to\nexclude, when considering any matching files. An initial '!' 
negates\nthe file pattern.", "type": [ "object", "null" @@ -2367,12 +2347,12 @@ "additionalProperties": { "type": "array", "items": { - "$ref": "#/definitions/RuleSelector" + "$ref": "#/$defs/RuleSelector" } } }, "preview": { - "description": "Whether to enable preview mode. When preview mode is enabled, Ruff will use unstable rules and fixes.", + "description": "Whether to enable preview mode. When preview mode is enabled, Ruff will\nuse unstable rules and fixes.", "type": [ "boolean", "null" @@ -2382,7 +2362,7 @@ "description": "Options for the `pycodestyle` plugin.", "anyOf": [ { - "$ref": "#/definitions/PycodestyleOptions" + "$ref": "#/$defs/PycodestyleOptions" }, { "type": "null" @@ -2393,7 +2373,7 @@ "description": "Options for the `pydoclint` plugin.", "anyOf": [ { - "$ref": "#/definitions/PydoclintOptions" + "$ref": "#/$defs/PydoclintOptions" }, { "type": "null" @@ -2404,7 +2384,7 @@ "description": "Options for the `pydocstyle` plugin.", "anyOf": [ { - "$ref": "#/definitions/PydocstyleOptions" + "$ref": "#/$defs/PydocstyleOptions" }, { "type": "null" @@ -2415,7 +2395,7 @@ "description": "Options for the `pyflakes` plugin.", "anyOf": [ { - "$ref": "#/definitions/PyflakesOptions" + "$ref": "#/$defs/PyflakesOptions" }, { "type": "null" @@ -2426,7 +2406,7 @@ "description": "Options for the `pylint` plugin.", "anyOf": [ { - "$ref": "#/definitions/PylintOptions" + "$ref": "#/$defs/PylintOptions" }, { "type": "null" @@ -2437,7 +2417,7 @@ "description": "Options for the `pyupgrade` plugin.", "anyOf": [ { - "$ref": "#/definitions/PyUpgradeOptions" + "$ref": "#/$defs/PyUpgradeOptions" }, { "type": "null" @@ -2448,7 +2428,7 @@ "description": "Options for the `ruff` plugin", "anyOf": [ { - "$ref": "#/definitions/RuffOptions" + "$ref": "#/$defs/RuffOptions" }, { "type": "null" @@ -2456,17 +2436,17 @@ ] }, "select": { - "description": "A list of rule codes or prefixes to enable. Prefixes can specify exact rules (like `F841`), entire categories (like `F`), or anything in between.\n\nWhen breaking ties between enabled and disabled rules (via `select` and `ignore`, respectively), more specific prefixes override less specific prefixes. `ignore` takes precedence over `select` if the same prefix appears in both.", + "description": "A list of rule codes or prefixes to enable. Prefixes can specify exact\nrules (like `F841`), entire categories (like `F`), or anything in\nbetween.\n\nWhen breaking ties between enabled and disabled rules (via `select` and\n`ignore`, respectively), more specific prefixes override less\nspecific prefixes. 
`ignore` takes precedence over `select` if the\nsame prefix appears in both.", "type": [ "array", "null" ], "items": { - "$ref": "#/definitions/RuleSelector" + "$ref": "#/$defs/RuleSelector" } }, "task-tags": { - "description": "A list of task tags to recognize (e.g., \"TODO\", \"FIXME\", \"XXX\").\n\nComments starting with these tags will be ignored by commented-out code detection (`ERA`), and skipped by line-length rules (`E501`) if [`ignore-overlong-task-comments`](#lint_pycodestyle_ignore-overlong-task-comments) is set to `true`.", + "description": "A list of task tags to recognize (e.g., \"TODO\", \"FIXME\", \"XXX\").\n\nComments starting with these tags will be ignored by commented-out code\ndetection (`ERA`), and skipped by line-length rules (`E501`) if\n[`ignore-overlong-task-comments`](#lint_pycodestyle_ignore-overlong-task-comments) is set to `true`.", "type": [ "array", "null" @@ -2476,14 +2456,14 @@ } }, "typing-extensions": { - "description": "Whether to allow imports from the third-party `typing_extensions` module for Python versions before a symbol was added to the first-party `typing` module.\n\nMany rules try to import symbols from the `typing` module but fall back to `typing_extensions` for earlier versions of Python. This option can be used to disable this fallback behavior in cases where `typing_extensions` is not installed.", + "description": "Whether to allow imports from the third-party `typing_extensions` module for Python versions\nbefore a symbol was added to the first-party `typing` module.\n\nMany rules try to import symbols from the `typing` module but fall back to\n`typing_extensions` for earlier versions of Python. This option can be used to disable this\nfallback behavior in cases where `typing_extensions` is not installed.", "type": [ "boolean", "null" ] }, "typing-modules": { - "description": "A list of modules whose exports should be treated equivalently to members of the `typing` module.\n\nThis is useful for ensuring proper type annotation inference for projects that re-export `typing` and `typing_extensions` members from a compatibility module. If omitted, any members imported from modules apart from `typing` and `typing_extensions` will be treated as ordinary Python objects.", + "description": "A list of modules whose exports should be treated equivalently to\nmembers of the `typing` module.\n\nThis is useful for ensuring proper type annotation inference for\nprojects that re-export `typing` and `typing_extensions` members\nfrom a compatibility module. 
If omitted, any members imported from\nmodules apart from `typing` and `typing_extensions` will be treated\nas ordinary Python objects.", "type": [ "array", "null" @@ -2499,7 +2479,7 @@ "null" ], "items": { - "$ref": "#/definitions/RuleSelector" + "$ref": "#/$defs/RuleSelector" } } }, @@ -2516,7 +2496,7 @@ "null" ], "format": "uint", - "minimum": 0.0 + "minimum": 0 } }, "additionalProperties": false @@ -2568,7 +2548,7 @@ "type": "object", "properties": { "classmethod-decorators": { - "description": "A list of decorators that, when applied to a method, indicate that the method should be treated as a class method (in addition to the builtin `@classmethod`).\n\nFor example, Ruff will expect that any method decorated by a decorator in this list takes a `cls` argument as its first argument.\n\nExpects to receive a list of fully-qualified names (e.g., `pydantic.validator`, rather than `validator`) or alternatively a plain name which is then matched against the last segment in case the decorator itself consists of a dotted name.", + "description": "A list of decorators that, when applied to a method, indicate that the\nmethod should be treated as a class method (in addition to the builtin\n`@classmethod`).\n\nFor example, Ruff will expect that any method decorated by a decorator\nin this list takes a `cls` argument as its first argument.\n\nExpects to receive a list of fully-qualified names (e.g., `pydantic.validator`,\nrather than `validator`) or alternatively a plain name which is then matched against\nthe last segment in case the decorator itself consists of a dotted name.", "type": [ "array", "null" @@ -2578,7 +2558,7 @@ } }, "extend-ignore-names": { - "description": "Additional names (or patterns) to ignore when considering `pep8-naming` violations, in addition to those included in [`ignore-names`](#lint_pep8-naming_ignore-names).\n\nSupports glob patterns. For example, to ignore all names starting with `test_` or ending with `_test`, you could use `ignore-names = [\"test_*\", \"*_test\"]`. For more information on the glob syntax, refer to the [`globset` documentation](https://docs.rs/globset/latest/globset/#syntax).", + "description": "Additional names (or patterns) to ignore when considering `pep8-naming` violations,\nin addition to those included in [`ignore-names`](#lint_pep8-naming_ignore-names).\n\nSupports glob patterns. For example, to ignore all names starting with `test_`\nor ending with `_test`, you could use `ignore-names = [\"test_*\", \"*_test\"]`.\nFor more information on the glob syntax, refer to the [`globset` documentation](https://docs.rs/globset/latest/globset/#syntax).", "type": [ "array", "null" @@ -2588,7 +2568,7 @@ } }, "ignore-names": { - "description": "A list of names (or patterns) to ignore when considering `pep8-naming` violations.\n\nSupports glob patterns. For example, to ignore all names starting with `test_` or ending with `_test`, you could use `ignore-names = [\"test_*\", \"*_test\"]`. For more information on the glob syntax, refer to the [`globset` documentation](https://docs.rs/globset/latest/globset/#syntax).", + "description": "A list of names (or patterns) to ignore when considering `pep8-naming` violations.\n\nSupports glob patterns. 
For example, to ignore all names starting with `test_`\nor ending with `_test`, you could use `ignore-names = [\"test_*\", \"*_test\"]`.\nFor more information on the glob syntax, refer to the [`globset` documentation](https://docs.rs/globset/latest/globset/#syntax).", "type": [ "array", "null" @@ -2598,7 +2578,7 @@ } }, "staticmethod-decorators": { - "description": "A list of decorators that, when applied to a method, indicate that the method should be treated as a static method (in addition to the builtin `@staticmethod`).\n\nFor example, Ruff will expect that any method decorated by a decorator in this list has no `self` or `cls` argument.\n\nExpects to receive a list of fully-qualified names (e.g., `belay.Device.teardown`, rather than `teardown`) or alternatively a plain name which is then matched against the last segment in case the decorator itself consists of a dotted name.", + "description": "A list of decorators that, when applied to a method, indicate that the\nmethod should be treated as a static method (in addition to the builtin\n`@staticmethod`).\n\nFor example, Ruff will expect that any method decorated by a decorator\nin this list has no `self` or `cls` argument.\n\nExpects to receive a list of fully-qualified names (e.g., `belay.Device.teardown`,\nrather than `teardown`) or alternatively a plain name which is then matched against\nthe last segment in case the decorator itself consists of a dotted name.", "type": [ "array", "null" @@ -2615,7 +2595,7 @@ "type": "object", "properties": { "keep-runtime-typing": { - "description": "Whether to avoid [PEP 585](https://peps.python.org/pep-0585/) (`List[int]` -> `list[int]`) and [PEP 604](https://peps.python.org/pep-0604/) (`Union[str, int]` -> `str | int`) rewrites even if a file imports `from __future__ import annotations`.\n\nThis setting is only applicable when the target Python version is below 3.9 and 3.10 respectively, and is most commonly used when working with libraries like Pydantic and FastAPI, which rely on the ability to parse type annotations at runtime. The use of `from __future__ import annotations` causes Python to treat the type annotations as strings, which typically allows for the use of language features that appear in later Python versions but are not yet supported by the current version (e.g., `str | int`). However, libraries that rely on runtime type annotations will break if the annotations are incompatible with the current Python version.\n\nFor example, while the following is valid Python 3.8 code due to the presence of `from __future__ import annotations`, the use of `str | int` prior to Python 3.10 will cause Pydantic to raise a `TypeError` at runtime:\n\n```python from __future__ import annotations\n\nimport pydantic\n\nclass Foo(pydantic.BaseModel): bar: str | int ```", + "description": "Whether to avoid [PEP 585](https://peps.python.org/pep-0585/) (`List[int]` -> `list[int]`) and [PEP 604](https://peps.python.org/pep-0604/)\n(`Union[str, int]` -> `str | int`) rewrites even if a file imports\n`from __future__ import annotations`.\n\nThis setting is only applicable when the target Python version is below\n3.9 and 3.10 respectively, and is most commonly used when working with\nlibraries like Pydantic and FastAPI, which rely on the ability to parse\ntype annotations at runtime. 
The use of `from __future__ import annotations`\ncauses Python to treat the type annotations as strings, which typically\nallows for the use of language features that appear in later Python\nversions but are not yet supported by the current version (e.g., `str |\nint`). However, libraries that rely on runtime type annotations will\nbreak if the annotations are incompatible with the current Python\nversion.\n\nFor example, while the following is valid Python 3.8 code due to the\npresence of `from __future__ import annotations`, the use of `str | int`\nprior to Python 3.10 will cause Pydantic to raise a `TypeError` at\nruntime:\n\n```python\nfrom __future__ import annotations\n\nimport pydantic\n\nclass Foo(pydantic.BaseModel):\n bar: str | int\n```", "type": [ "boolean", "null" @@ -2629,17 +2609,17 @@ "type": "object", "properties": { "ignore-overlong-task-comments": { - "description": "Whether line-length violations (`E501`) should be triggered for comments starting with [`task-tags`](#lint_task-tags) (by default: \"TODO\", \"FIXME\", and \"XXX\").", + "description": "Whether line-length violations (`E501`) should be triggered for\ncomments starting with [`task-tags`](#lint_task-tags) (by default: \"TODO\", \"FIXME\",\nand \"XXX\").", "type": [ "boolean", "null" ] }, "max-doc-length": { - "description": "The maximum line length to allow for [`doc-line-too-long`](https://docs.astral.sh/ruff/rules/doc-line-too-long/) violations within documentation (`W505`), including standalone comments. By default, this is set to `null` which disables reporting violations.\n\nThe length is determined by the number of characters per line, except for lines containing Asian characters or emojis. For these lines, the [unicode width](https://unicode.org/reports/tr11/) of each character is added up to determine the length.\n\nSee the [`doc-line-too-long`](https://docs.astral.sh/ruff/rules/doc-line-too-long/) rule for more information.", + "description": "The maximum line length to allow for [`doc-line-too-long`](https://docs.astral.sh/ruff/rules/doc-line-too-long/) violations within\ndocumentation (`W505`), including standalone comments. By default,\nthis is set to `null` which disables reporting violations.\n\nThe length is determined by the number of characters per line, except for lines containing Asian characters or emojis.\nFor these lines, the [unicode width](https://unicode.org/reports/tr11/) of each character is added up to determine the length.\n\nSee the [`doc-line-too-long`](https://docs.astral.sh/ruff/rules/doc-line-too-long/) rule for more information.", "anyOf": [ { - "$ref": "#/definitions/LineLength" + "$ref": "#/$defs/LineLength" }, { "type": "null" @@ -2647,10 +2627,10 @@ ] }, "max-line-length": { - "description": "The maximum line length to allow for [`line-too-long`](https://docs.astral.sh/ruff/rules/line-too-long/) violations. By default, this is set to the value of the [`line-length`](#line-length) option.\n\nUse this option when you want to detect extra-long lines that the formatter can't automatically split by setting `pycodestyle.line-length` to a value larger than [`line-length`](#line-length).\n\n```toml # The formatter wraps lines at a length of 88. line-length = 88\n\n[pycodestyle] # E501 reports lines that exceed the length of 100. max-line-length = 100 ```\n\nThe length is determined by the number of characters per line, except for lines containing East Asian characters or emojis. 
For these lines, the [unicode width](https://unicode.org/reports/tr11/) of each character is added up to determine the length.\n\nSee the [`line-too-long`](https://docs.astral.sh/ruff/rules/line-too-long/) rule for more information.", + "description": "The maximum line length to allow for [`line-too-long`](https://docs.astral.sh/ruff/rules/line-too-long/) violations. By default,\nthis is set to the value of the [`line-length`](#line-length) option.\n\nUse this option when you want to detect extra-long lines that the formatter can't automatically split by setting\n`pycodestyle.line-length` to a value larger than [`line-length`](#line-length).\n\n```toml\n# The formatter wraps lines at a length of 88.\nline-length = 88\n\n[pycodestyle]\n# E501 reports lines that exceed the length of 100.\nmax-line-length = 100\n```\n\nThe length is determined by the number of characters per line, except for lines containing East Asian characters or emojis.\nFor these lines, the [unicode width](https://unicode.org/reports/tr11/) of each character is added up to determine the length.\n\nSee the [`line-too-long`](https://docs.astral.sh/ruff/rules/line-too-long/) rule for more information.", "anyOf": [ { - "$ref": "#/definitions/LineLength" + "$ref": "#/$defs/LineLength" }, { "type": "null" @@ -2665,7 +2645,7 @@ "type": "object", "properties": { "ignore-one-line-docstrings": { - "description": "Skip docstrings which fit on a single line.\n\nNote: The corresponding setting in `pydoclint` is named `skip-checking-short-docstrings`.", + "description": "Skip docstrings which fit on a single line.\n\nNote: The corresponding setting in `pydoclint`\nis named `skip-checking-short-docstrings`.", "type": [ "boolean", "null" @@ -2679,10 +2659,10 @@ "type": "object", "properties": { "convention": { - "description": "Whether to use Google-style, NumPy-style conventions, or the [PEP 257](https://peps.python.org/pep-0257/) defaults when analyzing docstring sections.\n\nEnabling a convention will disable all rules that are not included in the specified convention. As such, the intended workflow is to enable a convention and then selectively enable or disable any additional rules on top of it.\n\nFor example, to use Google-style conventions but avoid requiring documentation for every function parameter:\n\n```toml [tool.ruff.lint] # Enable all `pydocstyle` rules, limiting to those that adhere to the # Google convention via `convention = \"google\"`, below. select = [\"D\"]\n\n# On top of the Google convention, disable `D417`, which requires # documentation for every function parameter. ignore = [\"D417\"]\n\n[tool.ruff.lint.pydocstyle] convention = \"google\" ```\n\nTo enable an additional rule that's excluded from the convention, select the desired rule via its fully qualified rule code (e.g., `D400` instead of `D4` or `D40`):\n\n```toml [tool.ruff.lint] # Enable D400 on top of the Google convention. extend-select = [\"D400\"]\n\n[tool.ruff.lint.pydocstyle] convention = \"google\" ```", + "description": "Whether to use Google-style, NumPy-style conventions, or the [PEP 257](https://peps.python.org/pep-0257/)\ndefaults when analyzing docstring sections.\n\nEnabling a convention will disable all rules that are not included in\nthe specified convention. 
As such, the intended workflow is to enable a\nconvention and then selectively enable or disable any additional rules\non top of it.\n\nFor example, to use Google-style conventions but avoid requiring\ndocumentation for every function parameter:\n\n```toml\n[tool.ruff.lint]\n# Enable all `pydocstyle` rules, limiting to those that adhere to the\n# Google convention via `convention = \"google\"`, below.\nselect = [\"D\"]\n\n# On top of the Google convention, disable `D417`, which requires\n# documentation for every function parameter.\nignore = [\"D417\"]\n\n[tool.ruff.lint.pydocstyle]\nconvention = \"google\"\n```\n\nTo enable an additional rule that's excluded from the convention,\nselect the desired rule via its fully qualified rule code (e.g.,\n`D400` instead of `D4` or `D40`):\n\n```toml\n[tool.ruff.lint]\n# Enable D400 on top of the Google convention.\nextend-select = [\"D400\"]\n\n[tool.ruff.lint.pydocstyle]\nconvention = \"google\"\n```", "anyOf": [ { - "$ref": "#/definitions/Convention" + "$ref": "#/$defs/Convention" }, { "type": "null" @@ -2690,7 +2670,7 @@ ] }, "ignore-decorators": { - "description": "Ignore docstrings for functions or methods decorated with the specified fully-qualified decorators.", + "description": "Ignore docstrings for functions or methods decorated with the\nspecified fully-qualified decorators.", "type": [ "array", "null" @@ -2707,7 +2687,7 @@ ] }, "property-decorators": { - "description": "A list of decorators that, when applied to a method, indicate that the method should be treated as a property (in addition to the builtin `@property` and standard-library `@functools.cached_property`).\n\nFor example, Ruff will expect that any method decorated by a decorator in this list can use a non-imperative summary line.", + "description": "A list of decorators that, when applied to a method, indicate that the\nmethod should be treated as a property (in addition to the builtin\n`@property` and standard-library `@functools.cached_property`).\n\nFor example, Ruff will expect that any method decorated by a decorator\nin this list can use a non-imperative summary line.", "type": [ "array", "null" @@ -2724,7 +2704,7 @@ "type": "object", "properties": { "allowed-unused-imports": { - "description": "A list of modules to ignore when considering unused imports.\n\nUsed to prevent violations for specific modules that are known to have side effects on import (e.g., `hvplot.pandas`).\n\nModules in this list are expected to be fully-qualified names (e.g., `hvplot.pandas`). Any submodule of a given module will also be ignored (e.g., given `hvplot`, `hvplot.pandas` will also be ignored).", + "description": "A list of modules to ignore when considering unused imports.\n\nUsed to prevent violations for specific modules that are known to have side effects on\nimport (e.g., `hvplot.pandas`).\n\nModules in this list are expected to be fully-qualified names (e.g., `hvplot.pandas`). 
Any\nsubmodule of a given module will also be ignored (e.g., given `hvplot`, `hvplot.pandas`\nwill also be ignored).", "type": [ "array", "null" @@ -2734,7 +2714,7 @@ } }, "extend-generics": { - "description": "Additional functions or classes to consider generic, such that any subscripts should be treated as type annotation (e.g., `ForeignKey` in `django.db.models.ForeignKey[\"User\"]`.\n\nExpects to receive a list of fully-qualified names (e.g., `django.db.models.ForeignKey`, rather than `ForeignKey`).", + "description": "Additional functions or classes to consider generic, such that any\nsubscripts should be treated as type annotation (e.g., `ForeignKey` in\n`django.db.models.ForeignKey[\"User\"]`.\n\nExpects to receive a list of fully-qualified names (e.g., `django.db.models.ForeignKey`,\nrather than `ForeignKey`).", "type": [ "array", "null" @@ -2751,7 +2731,7 @@ "type": "object", "properties": { "allow-dunder-method-names": { - "description": "Dunder methods name to allow, in addition to the default set from the Python standard library (see `PLW3201`).", + "description": "Dunder methods name to allow, in addition to the default set from the\nPython standard library (see `PLW3201`).", "type": [ "array", "null" @@ -2768,26 +2748,26 @@ "null" ], "items": { - "$ref": "#/definitions/ConstantType" + "$ref": "#/$defs/ConstantType" } }, "max-args": { - "description": "Maximum number of arguments allowed for a function or method definition (see `PLR0913`).", + "description": "Maximum number of arguments allowed for a function or method definition\n(see `PLR0913`).", "type": [ "integer", "null" ], "format": "uint", - "minimum": 0.0 + "minimum": 0 }, "max-bool-expr": { - "description": "Maximum number of Boolean expressions allowed within a single `if` statement (see `PLR0916`).", + "description": "Maximum number of Boolean expressions allowed within a single `if` statement\n(see `PLR0916`).", "type": [ "integer", "null" ], "format": "uint", - "minimum": 0.0 + "minimum": 0 }, "max-branches": { "description": "Maximum number of branches allowed for a function or method body (see `PLR0912`).", @@ -2796,7 +2776,7 @@ "null" ], "format": "uint", - "minimum": 0.0 + "minimum": 0 }, "max-locals": { "description": "Maximum number of local variables allowed for a function or method body (see `PLR0914`).", @@ -2805,25 +2785,25 @@ "null" ], "format": "uint", - "minimum": 0.0 + "minimum": 0 }, "max-nested-blocks": { - "description": "Maximum number of nested blocks allowed within a function or method body (see `PLR1702`).", + "description": "Maximum number of nested blocks allowed within a function or method body\n(see `PLR1702`).", "type": [ "integer", "null" ], "format": "uint", - "minimum": 0.0 + "minimum": 0 }, "max-positional-args": { - "description": "Maximum number of positional arguments allowed for a function or method definition (see `PLR0917`).\n\nIf not specified, defaults to the value of `max-args`.", + "description": "Maximum number of positional arguments allowed for a function or method definition\n(see `PLR0917`).\n\nIf not specified, defaults to the value of `max-args`.", "type": [ "integer", "null" ], "format": "uint", - "minimum": 0.0 + "minimum": 0 }, "max-public-methods": { "description": "Maximum number of public methods allowed for a class (see `PLR0904`).", @@ -2832,16 +2812,16 @@ "null" ], "format": "uint", - "minimum": 0.0 + "minimum": 0 }, "max-returns": { - "description": "Maximum number of return statements allowed for a function or method body (see `PLR0911`)", + "description": 
"Maximum number of return statements allowed for a function or method\nbody (see `PLR0911`)", "type": [ "integer", "null" ], "format": "uint", - "minimum": 0.0 + "minimum": 0 }, "max-statements": { "description": "Maximum number of statements allowed for a function or method body (see `PLR0915`).", @@ -2850,7 +2830,7 @@ "null" ], "format": "uint", - "minimum": 0.0 + "minimum": 0 } }, "additionalProperties": false @@ -2873,16 +2853,12 @@ { "description": "Use double quotes.", "type": "string", - "enum": [ - "double" - ] + "const": "double" }, { "description": "Use single quotes.", "type": "string", - "enum": [ - "single" - ] + "const": "single" } ] }, @@ -2897,18 +2873,14 @@ "RelativeImportsOrder": { "oneOf": [ { - "description": "Place \"closer\" imports (fewer `.` characters, most local) before \"further\" imports (more `.` characters, least local).", + "description": "Place \"closer\" imports (fewer `.` characters, most local) before\n\"further\" imports (more `.` characters, least local).", "type": "string", - "enum": [ - "closest-to-furthest" - ] + "const": "closest-to-furthest" }, { - "description": "Place \"further\" imports (more `.` characters, least local) imports before \"closer\" imports (fewer `.` characters, most local).", + "description": "Place \"further\" imports (more `.` characters, least local) imports\nbefore \"closer\" imports (fewer `.` characters, most local).", "type": "string", - "enum": [ - "furthest-to-closest" - ] + "const": "furthest-to-closest" } ] }, @@ -2920,29 +2892,29 @@ "type": "object", "properties": { "allowed-markup-calls": { - "description": "A list of callable names, whose result may be safely passed into [`markupsafe.Markup`](https://markupsafe.palletsprojects.com/en/stable/escaping/#markupsafe.Markup).\n\nExpects to receive a list of fully-qualified names (e.g., `bleach.clean`, rather than `clean`).\n\nThis setting helps you avoid false positives in code like:\n\n```python from bleach import clean from markupsafe import Markup\n\ncleaned_markup = Markup(clean(some_user_input)) ```\n\nWhere the use of [`bleach.clean`](https://bleach.readthedocs.io/en/latest/clean.html) usually ensures that there's no XSS vulnerability.\n\nAlthough it is not recommended, you may also use this setting to whitelist other kinds of calls, e.g. calls to i18n translation functions, where how safe that is will depend on the implementation and how well the translations are audited.\n\nAnother common use-case is to wrap the output of functions that generate markup like [`xml.etree.ElementTree.tostring`](https://docs.python.org/3/library/xml.etree.elementtree.html#xml.etree.ElementTree.tostring) or template rendering engines where sanitization of potential user input is either already baked in or has to happen before rendering.", - "deprecated": true, + "description": "A list of callable names, whose result may be safely passed into\n[`markupsafe.Markup`](https://markupsafe.palletsprojects.com/en/stable/escaping/#markupsafe.Markup).\n\nExpects to receive a list of fully-qualified names (e.g., `bleach.clean`, rather than `clean`).\n\nThis setting helps you avoid false positives in code like:\n\n```python\nfrom bleach import clean\nfrom markupsafe import Markup\n\ncleaned_markup = Markup(clean(some_user_input))\n```\n\nWhere the use of [`bleach.clean`](https://bleach.readthedocs.io/en/latest/clean.html)\nusually ensures that there's no XSS vulnerability.\n\nAlthough it is not recommended, you may also use this setting to whitelist other\nkinds of calls, e.g. 
calls to i18n translation functions, where how safe that is\nwill depend on the implementation and how well the translations are audited.\n\nAnother common use-case is to wrap the output of functions that generate markup\nlike [`xml.etree.ElementTree.tostring`](https://docs.python.org/3/library/xml.etree.elementtree.html#xml.etree.ElementTree.tostring)\nor template rendering engines where sanitization of potential user input is either\nalready baked in or has to happen before rendering.", "type": [ "array", "null" ], + "deprecated": true, "items": { "type": "string" } }, "extend-markup-names": { - "description": "A list of additional callable names that behave like [`markupsafe.Markup`](https://markupsafe.palletsprojects.com/en/stable/escaping/#markupsafe.Markup).\n\nExpects to receive a list of fully-qualified names (e.g., `webhelpers.html.literal`, rather than `literal`).", - "deprecated": true, + "description": "A list of additional callable names that behave like\n[`markupsafe.Markup`](https://markupsafe.palletsprojects.com/en/stable/escaping/#markupsafe.Markup).\n\nExpects to receive a list of fully-qualified names (e.g., `webhelpers.html.literal`, rather than\n`literal`).", "type": [ "array", "null" ], + "deprecated": true, "items": { "type": "string" } }, "parenthesize-tuple-in-subscript": { - "description": "Whether to prefer accessing items keyed by tuples with parentheses around the tuple (see `RUF031`).", + "description": "Whether to prefer accessing items keyed by tuples with\nparentheses around the tuple (see `RUF031`).", "type": [ "boolean", "null" @@ -4368,18 +4340,17 @@ { "description": "Ban imports that extend into the parent module or beyond.", "type": "string", - "enum": [ - "parents" - ] + "const": "parents" }, { "description": "Ban all relative imports.", "type": "string", - "enum": [ - "all" - ] + "const": "all" } ] + }, + "string": { + "type": "string" } } } \ No newline at end of file diff --git a/ty.schema.json b/ty.schema.json index 55cb190bb8c1e7..59c62f6a1ae702 100644 --- a/ty.schema.json +++ b/ty.schema.json @@ -1,5 +1,5 @@ { - "$schema": "http://json-schema.org/draft-07/schema#", + "$schema": "https://json-schema.org/draft/2020-12/schema", "title": "Options", "type": "object", "properties": { @@ -7,7 +7,7 @@ "description": "Configures the type checking environment.", "anyOf": [ { - "$ref": "#/definitions/EnvironmentOptions" + "$ref": "#/$defs/EnvironmentOptions" }, { "type": "null" @@ -15,20 +15,21 @@ ] }, "overrides": { - "description": "Override configurations for specific file patterns.\n\nEach override specifies include/exclude patterns and rule configurations that apply to matching files. Multiple overrides can match the same file, with later overrides taking precedence.", - "type": [ - "array", - "null" - ], - "items": { - "$ref": "#/definitions/OverrideOptions" - } + "description": "Override configurations for specific file patterns.\n\nEach override specifies include/exclude patterns and rule configurations\nthat apply to matching files. Multiple overrides can match the same file,\nwith later overrides taking precedence.", + "anyOf": [ + { + "$ref": "#/$defs/OverridesOptions" + }, + { + "type": "null" + } + ] }, "rules": { - "description": "Configures the enabled rules and their severity.\n\nSee [the rules documentation](https://ty.dev/rules) for a list of all available rules.\n\nValid severities are:\n\n* `ignore`: Disable the rule. * `warn`: Enable the rule and create a warning diagnostic. * `error`: Enable the rule and create an error diagnostic. 
ty will exit with a non-zero code if any error diagnostics are emitted.", + "description": "Configures the enabled rules and their severity.\n\nSee [the rules documentation](https://ty.dev/rules) for a list of all available rules.\n\nValid severities are:\n\n* `ignore`: Disable the rule.\n* `warn`: Enable the rule and create a warning diagnostic.\n* `error`: Enable the rule and create an error diagnostic.\n ty will exit with a non-zero code if any error diagnostics are emitted.", "anyOf": [ { - "$ref": "#/definitions/Rules" + "$ref": "#/$defs/Rules" }, { "type": "null" @@ -38,7 +39,7 @@ "src": { "anyOf": [ { - "$ref": "#/definitions/SrcOptions" + "$ref": "#/$defs/SrcOptions" }, { "type": "null" @@ -48,7 +49,7 @@ "terminal": { "anyOf": [ { - "$ref": "#/definitions/TerminalOptions" + "$ref": "#/$defs/TerminalOptions" }, { "type": "null" @@ -57,32 +58,42 @@ } }, "additionalProperties": false, - "definitions": { + "$defs": { + "Array_of_string": { + "type": "array", + "items": { + "$ref": "#/$defs/string" + } + }, "EnvironmentOptions": { "type": "object", "properties": { "extra-paths": { - "description": "User-provided paths that should take first priority in module resolution.\n\nThis is an advanced option that should usually only be used for first-party or third-party modules that are not installed into your Python environment in a conventional way. Use the `python` option to specify the location of your Python environment.\n\nThis option is similar to mypy's `MYPYPATH` environment variable and pyright's `stubPath` configuration setting.", + "description": "User-provided paths that should take first priority in module resolution.\n\nThis is an advanced option that should usually only be used for first-party or third-party\nmodules that are not installed into your Python environment in a conventional way.\nUse the `python` option to specify the location of your Python environment.\n\nThis option is similar to mypy's `MYPYPATH` environment variable and pyright's `stubPath`\nconfiguration setting.", "type": [ "array", "null" ], "items": { - "type": "string" + "$ref": "#/$defs/RelativePathBuf" } }, "python": { - "description": "Path to your project's Python environment or interpreter.\n\nty uses the `site-packages` directory of your project's Python environment to resolve third-party (and, in some cases, first-party) imports in your code.\n\nIf you're using a project management tool such as uv, you should not generally need to specify this option, as commands such as `uv run` will set the `VIRTUAL_ENV` environment variable to point to your project's virtual environment. ty can also infer the location of your environment from an activated Conda environment, and will look for a `.venv` directory in the project root if none of the above apply.\n\nPassing a path to a Python executable is supported, but passing a path to a dynamic executable (such as a shim) is not currently supported.\n\nThis option can be used to point to virtual or system Python environments.", - "type": [ - "string", - "null" + "description": "Path to your project's Python environment or interpreter.\n\nty uses the `site-packages` directory of your project's Python environment\nto resolve third-party (and, in some cases, first-party) imports in your code.\n\nIf you're using a project management tool such as uv, you should not generally need\nto specify this option, as commands such as `uv run` will set the `VIRTUAL_ENV`\nenvironment variable to point to your project's virtual environment. 
ty can also infer\nthe location of your environment from an activated Conda environment, and will look for\na `.venv` directory in the project root if none of the above apply.\n\nPassing a path to a Python executable is supported, but passing a path to a dynamic executable\n(such as a shim) is not currently supported.\n\nThis option can be used to point to virtual or system Python environments.", + "anyOf": [ + { + "$ref": "#/$defs/RelativePathBuf" + }, + { + "type": "null" + } ] }, "python-platform": { - "description": "Specifies the target platform that will be used to analyze the source code. If specified, ty will understand conditions based on comparisons with `sys.platform`, such as are commonly found in typeshed to reflect the differing contents of the standard library across platforms. If `all` is specified, ty will assume that the source code can run on any platform.\n\nIf no platform is specified, ty will use the current platform: - `win32` for Windows - `darwin` for macOS - `android` for Android - `ios` for iOS - `linux` for everything else", + "description": "Specifies the target platform that will be used to analyze the source code.\nIf specified, ty will understand conditions based on comparisons with `sys.platform`, such\nas are commonly found in typeshed to reflect the differing contents of the standard library across platforms.\nIf `all` is specified, ty will assume that the source code can run on any platform.\n\nIf no platform is specified, ty will use the current platform:\n- `win32` for Windows\n- `darwin` for macOS\n- `android` for Android\n- `ios` for iOS\n- `linux` for everything else", "anyOf": [ { - "$ref": "#/definitions/PythonPlatform" + "$ref": "#/$defs/PythonPlatform" }, { "type": "null" @@ -90,10 +101,10 @@ ] }, "python-version": { - "description": "Specifies the version of Python that will be used to analyze the source code. The version should be specified as a string in the format `M.m` where `M` is the major version and `m` is the minor (e.g. `\"3.0\"` or `\"3.6\"`). If a version is provided, ty will generate errors if the source code makes use of language features that are not supported in that version.\n\nIf a version is not specified, ty will try the following techniques in order of preference to determine a value: 1. Check for the `project.requires-python` setting in a `pyproject.toml` file and use the minimum version from the specified range 2. Check for an activated or configured Python environment and attempt to infer the Python version of that environment 3. Fall back to the default value (see below)\n\nFor some language features, ty can also understand conditionals based on comparisons with `sys.version_info`. These are commonly found in typeshed, for example, to reflect the differing contents of the standard library across Python versions.", + "description": "Specifies the version of Python that will be used to analyze the source code.\nThe version should be specified as a string in the format `M.m` where `M` is the major version\nand `m` is the minor (e.g. `\"3.0\"` or `\"3.6\"`).\nIf a version is provided, ty will generate errors if the source code makes use of language features\nthat are not supported in that version.\n\nIf a version is not specified, ty will try the following techniques in order of preference\nto determine a value:\n1. Check for the `project.requires-python` setting in a `pyproject.toml` file\n and use the minimum version from the specified range\n2. 
Check for an activated or configured Python environment\n and attempt to infer the Python version of that environment\n3. Fall back to the default value (see below)\n\nFor some language features, ty can also understand conditionals based on comparisons\nwith `sys.version_info`. These are commonly found in typeshed, for example,\nto reflect the differing contents of the standard library across Python versions.", "anyOf": [ { - "$ref": "#/definitions/PythonVersion" + "$ref": "#/$defs/PythonVersion" }, { "type": "null" @@ -101,20 +112,24 @@ ] }, "root": { - "description": "The root paths of the project, used for finding first-party modules.\n\nAccepts a list of directory paths searched in priority order (first has highest priority).\n\nIf left unspecified, ty will try to detect common project layouts and initialize `root` accordingly:\n\n* if a `./src` directory exists, include `.` and `./src` in the first party search path (src layout or flat) * if a `.//` directory exists, include `.` and `./` in the first party search path * otherwise, default to `.` (flat layout)\n\nBesides, if a `./python` or `./tests` directory exists and is not a package (i.e. it does not contain an `__init__.py` or `__init__.pyi` file), it will also be included in the first party search path.", + "description": "The root paths of the project, used for finding first-party modules.\n\nAccepts a list of directory paths searched in priority order (first has highest priority).\n\nIf left unspecified, ty will try to detect common project layouts and initialize `root` accordingly:\n\n* if a `./src` directory exists, include `.` and `./src` in the first party search path (src layout or flat)\n* if a `.//` directory exists, include `.` and `./` in the first party search path\n* otherwise, default to `.` (flat layout)\n\nBesides, if a `./python` or `./tests` directory exists and is not a package (i.e. it does not contain an `__init__.py` or `__init__.pyi` file),\nit will also be included in the first party search path.", "type": [ "array", "null" ], "items": { - "type": "string" + "$ref": "#/$defs/RelativePathBuf" } }, "typeshed": { - "description": "Optional path to a \"typeshed\" directory on disk for us to use for standard-library types. If this is not provided, we will fallback to our vendored typeshed stubs for the stdlib, bundled as a zip file in the binary", - "type": [ - "string", - "null" + "description": "Optional path to a \"typeshed\" directory on disk for us to use for standard-library types.\nIf this is not provided, we will fallback to our vendored typeshed stubs for the stdlib,\nbundled as a zip file in the binary", + "anyOf": [ + { + "$ref": "#/$defs/RelativePathBuf" + }, + { + "type": "null" + } ] } }, @@ -126,25 +141,19 @@ "title": "Ignore", "description": "The lint is disabled and should not run.", "type": "string", - "enum": [ - "ignore" - ] + "const": "ignore" }, { "title": "Warn", "description": "The lint is enabled and diagnostic should have a warning severity.", "type": "string", - "enum": [ - "warn" - ] + "const": "warn" }, { "title": "Error", "description": "The lint is enabled and diagnostics have an error severity.", "type": "string", - "enum": [ - "error" - ] + "const": "error" } ] }, @@ -152,32 +161,24 @@ "description": "The diagnostic output format.", "oneOf": [ { - "description": "The default full mode will print \"pretty\" diagnostics.\n\nThat is, color will be used when printing to a `tty`. 
Moreover, diagnostic messages may include additional context and annotations on the input to help understand the message.", + "description": "The default full mode will print \"pretty\" diagnostics.\n\nThat is, color will be used when printing to a `tty`.\nMoreover, diagnostic messages may include additional\ncontext and annotations on the input to help understand\nthe message.", "type": "string", - "enum": [ - "full" - ] + "const": "full" }, { - "description": "Print diagnostics in a concise mode.\n\nThis will guarantee that each diagnostic is printed on a single line. Only the most important or primary aspects of the diagnostic are included. Contextual information is dropped.\n\nThis may use color when printing to a `tty`.", + "description": "Print diagnostics in a concise mode.\n\nThis will guarantee that each diagnostic is printed on\na single line. Only the most important or primary aspects\nof the diagnostic are included. Contextual information is\ndropped.\n\nThis may use color when printing to a `tty`.", "type": "string", - "enum": [ - "concise" - ] + "const": "concise" }, { "description": "Print diagnostics in the JSON format expected by GitLab [Code Quality] reports.\n\n[Code Quality]: https://docs.gitlab.com/ci/testing/code_quality/#code-quality-report-format", "type": "string", - "enum": [ - "gitlab" - ] + "const": "gitlab" }, { "description": "Print diagnostics in the format used by [GitHub Actions] workflow error annotations.\n\n[GitHub Actions]: https://docs.github.com/en/actions/reference/workflows-and-actions/workflow-commands#setting-an-error-message", "type": "string", - "enum": [ - "github" - ] + "const": "github" } ] }, @@ -185,30 +186,32 @@ "type": "object", "properties": { "exclude": { - "description": "A list of file and directory patterns to exclude from this override.\n\nPatterns follow a syntax similar to `.gitignore`. Exclude patterns take precedence over include patterns within the same override.\n\nIf not specified, defaults to `[]` (excludes no files).", - "type": [ - "array", - "null" - ], - "items": { - "type": "string" - } + "description": "A list of file and directory patterns to exclude from this override.\n\nPatterns follow a syntax similar to `.gitignore`.\nExclude patterns take precedence over include patterns within the same override.\n\nIf not specified, defaults to `[]` (excludes no files).", + "anyOf": [ + { + "$ref": "#/$defs/Array_of_string" + }, + { + "type": "null" + } + ] }, "include": { - "description": "A list of file and directory patterns to include for this override.\n\nThe `include` option follows a similar syntax to `.gitignore` but reversed: Including a file or directory will make it so that it (and its contents) are affected by this override.\n\nIf not specified, defaults to `[\"**\"]` (matches all files).", - "type": [ - "array", - "null" - ], - "items": { - "type": "string" - } + "description": "A list of file and directory patterns to include for this override.\n\nThe `include` option follows a similar syntax to `.gitignore` but reversed:\nIncluding a file or directory will make it so that it (and its contents)\nare affected by this override.\n\nIf not specified, defaults to `[\"**\"]` (matches all files).", + "anyOf": [ + { + "$ref": "#/$defs/Array_of_string" + }, + { + "type": "null" + } + ] }, "rules": { - "description": "Rule overrides for files matching the include/exclude patterns.\n\nThese rules will be merged with the global rules, with override rules taking precedence for matching files. 
You can set rules to different severity levels or disable them entirely.", + "description": "Rule overrides for files matching the include/exclude patterns.\n\nThese rules will be merged with the global rules, with override rules\ntaking precedence for matching files. You can set rules to different\nseverity levels or disable them entirely.", "anyOf": [ { - "$ref": "#/definitions/Rules" + "$ref": "#/$defs/Rules" }, { "type": "null" @@ -218,6 +221,13 @@ }, "additionalProperties": false }, + "OverridesOptions": { + "description": "Configuration override that applies to specific files based on glob patterns.\n\nAn override allows you to apply different rule configurations to specific\nfiles or directories. Multiple overrides can match the same file, with\nlater overrides take precedence.\n\n### Precedence\n\n- Later overrides in the array take precedence over earlier ones\n- Override rules take precedence over global rules for matching files\n\n### Examples\n\n```toml\n# Relax rules for test files\n[[tool.ty.overrides]]\ninclude = [\"tests/**\", \"**/test_*.py\"]\n\n[tool.ty.overrides.rules]\npossibly-unresolved-reference = \"warn\"\n\n# Ignore generated files but still check important ones\n[[tool.ty.overrides]]\ninclude = [\"generated/**\"]\nexclude = [\"generated/important.py\"]\n\n[tool.ty.overrides.rules]\npossibly-unresolved-reference = \"ignore\"\n```", + "type": "array", + "items": { + "$ref": "#/$defs/OverrideOptions" + } + }, "PythonPlatform": { "description": "The target platform to assume when resolving types.\n", "anyOf": [ @@ -246,7 +256,7 @@ "anyOf": [ { "type": "string", - "pattern": "^\\d+\\.\\d+$" + "pattern": "^\\\\d+\\\\.\\\\d+$" }, { "description": "Python 3.7", @@ -282,6 +292,10 @@ } ] }, + "RelativePathBuf": { + "description": "A possibly relative path in a configuration file.\n\nRelative paths in configuration files or from CLI options\nrequire different anchoring:\n\n* CLI: The path is relative to the current working directory\n* Configuration file: The path is relative to the project's root.", + "$ref": "#/$defs/SystemPathBuf" + }, "Rules": { "type": "object", "properties": { @@ -291,7 +305,7 @@ "default": "warn", "oneOf": [ { - "$ref": "#/definitions/Level" + "$ref": "#/$defs/Level" } ] }, @@ -301,7 +315,7 @@ "default": "error", "oneOf": [ { - "$ref": "#/definitions/Level" + "$ref": "#/$defs/Level" } ] }, @@ -311,7 +325,7 @@ "default": "error", "oneOf": [ { - "$ref": "#/definitions/Level" + "$ref": "#/$defs/Level" } ] }, @@ -321,7 +335,7 @@ "default": "error", "oneOf": [ { - "$ref": "#/definitions/Level" + "$ref": "#/$defs/Level" } ] }, @@ -331,7 +345,7 @@ "default": "error", "oneOf": [ { - "$ref": "#/definitions/Level" + "$ref": "#/$defs/Level" } ] }, @@ -341,7 +355,7 @@ "default": "error", "oneOf": [ { - "$ref": "#/definitions/Level" + "$ref": "#/$defs/Level" } ] }, @@ -351,7 +365,7 @@ "default": "error", "oneOf": [ { - "$ref": "#/definitions/Level" + "$ref": "#/$defs/Level" } ] }, @@ -361,7 +375,7 @@ "default": "warn", "oneOf": [ { - "$ref": "#/definitions/Level" + "$ref": "#/$defs/Level" } ] }, @@ -371,7 +385,7 @@ "default": "ignore", "oneOf": [ { - "$ref": "#/definitions/Level" + "$ref": "#/$defs/Level" } ] }, @@ -381,7 +395,7 @@ "default": "error", "oneOf": [ { - "$ref": "#/definitions/Level" + "$ref": "#/$defs/Level" } ] }, @@ -391,7 +405,7 @@ "default": "error", "oneOf": [ { - "$ref": "#/definitions/Level" + "$ref": "#/$defs/Level" } ] }, @@ -401,7 +415,7 @@ "default": "error", "oneOf": [ { - "$ref": "#/definitions/Level" + "$ref": "#/$defs/Level" } ] }, @@ 
-411,7 +425,7 @@ "default": "error", "oneOf": [ { - "$ref": "#/definitions/Level" + "$ref": "#/$defs/Level" } ] }, @@ -421,7 +435,7 @@ "default": "warn", "oneOf": [ { - "$ref": "#/definitions/Level" + "$ref": "#/$defs/Level" } ] }, @@ -431,7 +445,7 @@ "default": "error", "oneOf": [ { - "$ref": "#/definitions/Level" + "$ref": "#/$defs/Level" } ] }, @@ -441,7 +455,7 @@ "default": "error", "oneOf": [ { - "$ref": "#/definitions/Level" + "$ref": "#/$defs/Level" } ] }, @@ -451,7 +465,7 @@ "default": "error", "oneOf": [ { - "$ref": "#/definitions/Level" + "$ref": "#/$defs/Level" } ] }, @@ -461,7 +475,7 @@ "default": "error", "oneOf": [ { - "$ref": "#/definitions/Level" + "$ref": "#/$defs/Level" } ] }, @@ -471,7 +485,7 @@ "default": "error", "oneOf": [ { - "$ref": "#/definitions/Level" + "$ref": "#/$defs/Level" } ] }, @@ -481,7 +495,7 @@ "default": "error", "oneOf": [ { - "$ref": "#/definitions/Level" + "$ref": "#/$defs/Level" } ] }, @@ -491,7 +505,7 @@ "default": "error", "oneOf": [ { - "$ref": "#/definitions/Level" + "$ref": "#/$defs/Level" } ] }, @@ -501,7 +515,7 @@ "default": "error", "oneOf": [ { - "$ref": "#/definitions/Level" + "$ref": "#/$defs/Level" } ] }, @@ -511,7 +525,7 @@ "default": "error", "oneOf": [ { - "$ref": "#/definitions/Level" + "$ref": "#/$defs/Level" } ] }, @@ -521,7 +535,7 @@ "default": "error", "oneOf": [ { - "$ref": "#/definitions/Level" + "$ref": "#/$defs/Level" } ] }, @@ -531,7 +545,7 @@ "default": "error", "oneOf": [ { - "$ref": "#/definitions/Level" + "$ref": "#/$defs/Level" } ] }, @@ -541,7 +555,7 @@ "default": "error", "oneOf": [ { - "$ref": "#/definitions/Level" + "$ref": "#/$defs/Level" } ] }, @@ -551,7 +565,7 @@ "default": "error", "oneOf": [ { - "$ref": "#/definitions/Level" + "$ref": "#/$defs/Level" } ] }, @@ -561,7 +575,7 @@ "default": "warn", "oneOf": [ { - "$ref": "#/definitions/Level" + "$ref": "#/$defs/Level" } ] }, @@ -571,7 +585,7 @@ "default": "error", "oneOf": [ { - "$ref": "#/definitions/Level" + "$ref": "#/$defs/Level" } ] }, @@ -581,7 +595,7 @@ "default": "error", "oneOf": [ { - "$ref": "#/definitions/Level" + "$ref": "#/$defs/Level" } ] }, @@ -591,7 +605,7 @@ "default": "error", "oneOf": [ { - "$ref": "#/definitions/Level" + "$ref": "#/$defs/Level" } ] }, @@ -601,7 +615,7 @@ "default": "error", "oneOf": [ { - "$ref": "#/definitions/Level" + "$ref": "#/$defs/Level" } ] }, @@ -611,7 +625,7 @@ "default": "error", "oneOf": [ { - "$ref": "#/definitions/Level" + "$ref": "#/$defs/Level" } ] }, @@ -621,7 +635,7 @@ "default": "error", "oneOf": [ { - "$ref": "#/definitions/Level" + "$ref": "#/$defs/Level" } ] }, @@ -631,7 +645,7 @@ "default": "error", "oneOf": [ { - "$ref": "#/definitions/Level" + "$ref": "#/$defs/Level" } ] }, @@ -641,7 +655,7 @@ "default": "error", "oneOf": [ { - "$ref": "#/definitions/Level" + "$ref": "#/$defs/Level" } ] }, @@ -651,7 +665,7 @@ "default": "error", "oneOf": [ { - "$ref": "#/definitions/Level" + "$ref": "#/$defs/Level" } ] }, @@ -661,7 +675,7 @@ "default": "error", "oneOf": [ { - "$ref": "#/definitions/Level" + "$ref": "#/$defs/Level" } ] }, @@ -671,7 +685,7 @@ "default": "error", "oneOf": [ { - "$ref": "#/definitions/Level" + "$ref": "#/$defs/Level" } ] }, @@ -681,7 +695,7 @@ "default": "error", "oneOf": [ { - "$ref": "#/definitions/Level" + "$ref": "#/$defs/Level" } ] }, @@ -691,7 +705,7 @@ "default": "error", "oneOf": [ { - "$ref": "#/definitions/Level" + "$ref": "#/$defs/Level" } ] }, @@ -701,7 +715,7 @@ "default": "error", "oneOf": [ { - "$ref": "#/definitions/Level" + "$ref": "#/$defs/Level" } ] }, @@ -711,7 +725,7 @@ 
"default": "error", "oneOf": [ { - "$ref": "#/definitions/Level" + "$ref": "#/$defs/Level" } ] }, @@ -721,7 +735,7 @@ "default": "error", "oneOf": [ { - "$ref": "#/definitions/Level" + "$ref": "#/$defs/Level" } ] }, @@ -731,7 +745,7 @@ "default": "error", "oneOf": [ { - "$ref": "#/definitions/Level" + "$ref": "#/$defs/Level" } ] }, @@ -741,7 +755,7 @@ "default": "error", "oneOf": [ { - "$ref": "#/definitions/Level" + "$ref": "#/$defs/Level" } ] }, @@ -751,7 +765,7 @@ "default": "error", "oneOf": [ { - "$ref": "#/definitions/Level" + "$ref": "#/$defs/Level" } ] }, @@ -761,7 +775,7 @@ "default": "error", "oneOf": [ { - "$ref": "#/definitions/Level" + "$ref": "#/$defs/Level" } ] }, @@ -771,7 +785,7 @@ "default": "error", "oneOf": [ { - "$ref": "#/definitions/Level" + "$ref": "#/$defs/Level" } ] }, @@ -781,7 +795,7 @@ "default": "error", "oneOf": [ { - "$ref": "#/definitions/Level" + "$ref": "#/$defs/Level" } ] }, @@ -791,7 +805,7 @@ "default": "error", "oneOf": [ { - "$ref": "#/definitions/Level" + "$ref": "#/$defs/Level" } ] }, @@ -801,7 +815,7 @@ "default": "error", "oneOf": [ { - "$ref": "#/definitions/Level" + "$ref": "#/$defs/Level" } ] }, @@ -811,7 +825,7 @@ "default": "warn", "oneOf": [ { - "$ref": "#/definitions/Level" + "$ref": "#/$defs/Level" } ] }, @@ -821,7 +835,7 @@ "default": "warn", "oneOf": [ { - "$ref": "#/definitions/Level" + "$ref": "#/$defs/Level" } ] }, @@ -831,7 +845,7 @@ "default": "warn", "oneOf": [ { - "$ref": "#/definitions/Level" + "$ref": "#/$defs/Level" } ] }, @@ -841,7 +855,7 @@ "default": "ignore", "oneOf": [ { - "$ref": "#/definitions/Level" + "$ref": "#/$defs/Level" } ] }, @@ -851,7 +865,7 @@ "default": "error", "oneOf": [ { - "$ref": "#/definitions/Level" + "$ref": "#/$defs/Level" } ] }, @@ -861,7 +875,7 @@ "default": "warn", "oneOf": [ { - "$ref": "#/definitions/Level" + "$ref": "#/$defs/Level" } ] }, @@ -871,7 +885,7 @@ "default": "error", "oneOf": [ { - "$ref": "#/definitions/Level" + "$ref": "#/$defs/Level" } ] }, @@ -881,7 +895,7 @@ "default": "error", "oneOf": [ { - "$ref": "#/definitions/Level" + "$ref": "#/$defs/Level" } ] }, @@ -891,7 +905,7 @@ "default": "error", "oneOf": [ { - "$ref": "#/definitions/Level" + "$ref": "#/$defs/Level" } ] }, @@ -901,7 +915,7 @@ "default": "error", "oneOf": [ { - "$ref": "#/definitions/Level" + "$ref": "#/$defs/Level" } ] }, @@ -911,7 +925,7 @@ "default": "error", "oneOf": [ { - "$ref": "#/definitions/Level" + "$ref": "#/$defs/Level" } ] }, @@ -921,7 +935,7 @@ "default": "warn", "oneOf": [ { - "$ref": "#/definitions/Level" + "$ref": "#/$defs/Level" } ] }, @@ -931,7 +945,7 @@ "default": "error", "oneOf": [ { - "$ref": "#/definitions/Level" + "$ref": "#/$defs/Level" } ] }, @@ -941,7 +955,7 @@ "default": "error", "oneOf": [ { - "$ref": "#/definitions/Level" + "$ref": "#/$defs/Level" } ] }, @@ -951,7 +965,7 @@ "default": "warn", "oneOf": [ { - "$ref": "#/definitions/Level" + "$ref": "#/$defs/Level" } ] }, @@ -961,7 +975,7 @@ "default": "error", "oneOf": [ { - "$ref": "#/definitions/Level" + "$ref": "#/$defs/Level" } ] }, @@ -971,7 +985,7 @@ "default": "error", "oneOf": [ { - "$ref": "#/definitions/Level" + "$ref": "#/$defs/Level" } ] }, @@ -981,7 +995,7 @@ "default": "warn", "oneOf": [ { - "$ref": "#/definitions/Level" + "$ref": "#/$defs/Level" } ] }, @@ -991,7 +1005,7 @@ "default": "error", "oneOf": [ { - "$ref": "#/definitions/Level" + "$ref": "#/$defs/Level" } ] }, @@ -1001,7 +1015,7 @@ "default": "error", "oneOf": [ { - "$ref": "#/definitions/Level" + "$ref": "#/$defs/Level" } ] }, @@ -1011,7 +1025,7 @@ "default": 
"ignore", "oneOf": [ { - "$ref": "#/definitions/Level" + "$ref": "#/$defs/Level" } ] }, @@ -1021,7 +1035,7 @@ "default": "warn", "oneOf": [ { - "$ref": "#/definitions/Level" + "$ref": "#/$defs/Level" } ] }, @@ -1031,56 +1045,66 @@ "default": "error", "oneOf": [ { - "$ref": "#/definitions/Level" + "$ref": "#/$defs/Level" } ] } }, "additionalProperties": { - "$ref": "#/definitions/Level" + "$ref": "#/$defs/Level" } }, "SrcOptions": { "type": "object", "properties": { "exclude": { - "description": "A list of file and directory patterns to exclude from type checking.\n\nPatterns follow a syntax similar to `.gitignore`:\n\n- `./src/` matches only a directory - `./src` matches both files and directories - `src` matches files or directories named `src` - `*` matches any (possibly empty) sequence of characters (except `/`). - `**` matches zero or more path components. This sequence **must** form a single path component, so both `**a` and `b**` are invalid and will result in an error. A sequence of more than two consecutive `*` characters is also invalid. - `?` matches any single character except `/` - `[abc]` matches any character inside the brackets. Character sequences can also specify ranges of characters, as ordered by Unicode, so e.g. `[0-9]` specifies any character between `0` and `9` inclusive. An unclosed bracket is invalid. - `!pattern` negates a pattern (undoes the exclusion of files that would otherwise be excluded)\n\nAll paths are anchored relative to the project root (`src` only matches `/src` and not `/test/src`). To exclude any directory or file named `src`, use `**/src` instead.\n\nBy default, ty excludes commonly ignored directories:\n\n- `**/.bzr/` - `**/.direnv/` - `**/.eggs/` - `**/.git/` - `**/.git-rewrite/` - `**/.hg/` - `**/.mypy_cache/` - `**/.nox/` - `**/.pants.d/` - `**/.pytype/` - `**/.ruff_cache/` - `**/.svn/` - `**/.tox/` - `**/.venv/` - `**/__pypackages__/` - `**/_build/` - `**/buck-out/` - `**/dist/` - `**/node_modules/` - `**/venv/`\n\nYou can override any default exclude by using a negated pattern. For example, to re-include `dist` use `exclude = [\"!dist\"]`", - "type": [ - "array", - "null" - ], - "items": { - "type": "string" - } + "description": "A list of file and directory patterns to exclude from type checking.\n\nPatterns follow a syntax similar to `.gitignore`:\n\n- `./src/` matches only a directory\n- `./src` matches both files and directories\n- `src` matches files or directories named `src`\n- `*` matches any (possibly empty) sequence of characters (except `/`).\n- `**` matches zero or more path components.\n This sequence **must** form a single path component, so both `**a` and `b**` are invalid and will result in an error.\n A sequence of more than two consecutive `*` characters is also invalid.\n- `?` matches any single character except `/`\n- `[abc]` matches any character inside the brackets. Character sequences can also specify ranges of characters, as ordered by Unicode,\n so e.g. `[0-9]` specifies any character between `0` and `9` inclusive. 
An unclosed bracket is invalid.\n- `!pattern` negates a pattern (undoes the exclusion of files that would otherwise be excluded)\n\nAll paths are anchored relative to the project root (`src` only\nmatches `/src` and not `/test/src`).\nTo exclude any directory or file named `src`, use `**/src` instead.\n\nBy default, ty excludes commonly ignored directories:\n\n- `**/.bzr/`\n- `**/.direnv/`\n- `**/.eggs/`\n- `**/.git/`\n- `**/.git-rewrite/`\n- `**/.hg/`\n- `**/.mypy_cache/`\n- `**/.nox/`\n- `**/.pants.d/`\n- `**/.pytype/`\n- `**/.ruff_cache/`\n- `**/.svn/`\n- `**/.tox/`\n- `**/.venv/`\n- `**/__pypackages__/`\n- `**/_build/`\n- `**/buck-out/`\n- `**/dist/`\n- `**/node_modules/`\n- `**/venv/`\n\nYou can override any default exclude by using a negated pattern. For example,\nto re-include `dist` use `exclude = [\"!dist\"]`", + "anyOf": [ + { + "$ref": "#/$defs/Array_of_string" + }, + { + "type": "null" + } + ] }, "include": { - "description": "A list of files and directories to check. The `include` option follows a similar syntax to `.gitignore` but reversed: Including a file or directory will make it so that it (and its contents) are type checked.\n\n- `./src/` matches only a directory - `./src` matches both files and directories - `src` matches a file or directory named `src` - `*` matches any (possibly empty) sequence of characters (except `/`). - `**` matches zero or more path components. This sequence **must** form a single path component, so both `**a` and `b**` are invalid and will result in an error. A sequence of more than two consecutive `*` characters is also invalid. - `?` matches any single character except `/` - `[abc]` matches any character inside the brackets. Character sequences can also specify ranges of characters, as ordered by Unicode, so e.g. `[0-9]` specifies any character between `0` and `9` inclusive. An unclosed bracket is invalid.\n\nAll paths are anchored relative to the project root (`src` only matches `/src` and not `/test/src`).\n\n`exclude` takes precedence over `include`.", - "type": [ - "array", - "null" - ], - "items": { - "type": "string" - } + "description": "A list of files and directories to check. The `include` option\nfollows a similar syntax to `.gitignore` but reversed:\nIncluding a file or directory will make it so that it (and its contents)\nare type checked.\n\n- `./src/` matches only a directory\n- `./src` matches both files and directories\n- `src` matches a file or directory named `src`\n- `*` matches any (possibly empty) sequence of characters (except `/`).\n- `**` matches zero or more path components.\n This sequence **must** form a single path component, so both `**a` and `b**` are invalid and will result in an error.\n A sequence of more than two consecutive `*` characters is also invalid.\n- `?` matches any single character except `/`\n- `[abc]` matches any character inside the brackets. Character sequences can also specify ranges of characters, as ordered by Unicode,\n so e.g. `[0-9]` specifies any character between `0` and `9` inclusive. An unclosed bracket is invalid.\n\nAll paths are anchored relative to the project root (`src` only\nmatches `/src` and not `/test/src`).\n\n`exclude` takes precedence over `include`.", + "anyOf": [ + { + "$ref": "#/$defs/Array_of_string" + }, + { + "type": "null" + } + ] }, "respect-ignore-files": { - "description": "Whether to automatically exclude files that are ignored by `.ignore`, `.gitignore`, `.git/info/exclude`, and global `gitignore` files. 
Enabled by default.", + "description": "Whether to automatically exclude files that are ignored by `.ignore`,\n`.gitignore`, `.git/info/exclude`, and global `gitignore` files.\nEnabled by default.", "type": [ "boolean", "null" ] }, "root": { - "description": "The root of the project, used for finding first-party modules.\n\nIf left unspecified, ty will try to detect common project layouts and initialize `src.root` accordingly:\n\n* if a `./src` directory exists, include `.` and `./src` in the first party search path (src layout or flat) * if a `.//` directory exists, include `.` and `./` in the first party search path * otherwise, default to `.` (flat layout)\n\nBesides, if a `./tests` directory exists and is not a package (i.e. it does not contain an `__init__.py` file), it will also be included in the first party search path.", - "deprecated": true, - "type": [ - "string", - "null" - ] + "description": "The root of the project, used for finding first-party modules.\n\nIf left unspecified, ty will try to detect common project layouts and initialize `src.root` accordingly:\n\n* if a `./src` directory exists, include `.` and `./src` in the first party search path (src layout or flat)\n* if a `.//` directory exists, include `.` and `./` in the first party search path\n* otherwise, default to `.` (flat layout)\n\nBesides, if a `./tests` directory exists and is not a package (i.e. it does not contain an `__init__.py` file),\nit will also be included in the first party search path.", + "anyOf": [ + { + "$ref": "#/$defs/RelativePathBuf" + }, + { + "type": "null" + } + ], + "deprecated": true } }, "additionalProperties": false }, + "SystemPathBuf": { + "description": "An owned, mutable path on [`System`](`super::System`) (akin to [`String`]).\n\nThe path is guaranteed to be valid UTF-8.", + "type": "string" + }, "TerminalOptions": { "type": "object", "properties": { @@ -1095,7 +1119,7 @@ "description": "The format to use for printing diagnostic messages.\n\nDefaults to `full`.", "anyOf": [ { - "$ref": "#/definitions/OutputFormat" + "$ref": "#/$defs/OutputFormat" }, { "type": "null" @@ -1104,6 +1128,9 @@ } }, "additionalProperties": false + }, + "string": { + "type": "string" } } } \ No newline at end of file From e84a8f081105d1f0b15656fa0b8c1c148b785855 Mon Sep 17 00:00:00 2001 From: Takayuki Maeda Date: Mon, 20 Oct 2025 15:37:06 +0900 Subject: [PATCH 2/4] use draft07 --- crates/ruff_dev/src/generate_json_schema.rs | 6 ++++-- crates/ruff_dev/src/generate_ty_schema.rs | 6 ++++-- 2 files changed, 8 insertions(+), 4 deletions(-) diff --git a/crates/ruff_dev/src/generate_json_schema.rs b/crates/ruff_dev/src/generate_json_schema.rs index 61d616cfb4b224..239f6758285d9f 100644 --- a/crates/ruff_dev/src/generate_json_schema.rs +++ b/crates/ruff_dev/src/generate_json_schema.rs @@ -3,7 +3,7 @@ use std::path::PathBuf; use anyhow::{Result, bail}; use pretty_assertions::StrComparison; -use schemars::schema_for; +use schemars::generate::SchemaSettings; use crate::ROOT_DIR; use crate::generate_all::{Mode, REGENERATE_ALL_COMMAND}; @@ -17,7 +17,9 @@ pub(crate) struct Args { } pub(crate) fn main(args: &Args) -> Result<()> { - let schema = schema_for!(Options); + let settings = SchemaSettings::draft07(); + let generator = settings.into_generator(); + let schema = generator.into_root_schema_for::(); let schema_string = serde_json::to_string_pretty(&schema).unwrap(); let filename = "ruff.schema.json"; let schema_path = PathBuf::from(ROOT_DIR).join(filename); diff --git a/crates/ruff_dev/src/generate_ty_schema.rs 
b/crates/ruff_dev/src/generate_ty_schema.rs index eac09963b36ea4..e819e91d104ce4 100644 --- a/crates/ruff_dev/src/generate_ty_schema.rs +++ b/crates/ruff_dev/src/generate_ty_schema.rs @@ -3,7 +3,7 @@ use std::path::PathBuf; use anyhow::{Result, bail}; use pretty_assertions::StrComparison; -use schemars::schema_for; +use schemars::generate::SchemaSettings; use crate::ROOT_DIR; use crate::generate_all::{Mode, REGENERATE_ALL_COMMAND}; @@ -17,7 +17,9 @@ pub(crate) struct Args { } pub(crate) fn main(args: &Args) -> Result<()> { - let schema = schema_for!(Options); + let settings = SchemaSettings::draft07(); + let generator = settings.into_generator(); + let schema = generator.into_root_schema_for::(); let schema_string = serde_json::to_string_pretty(&schema).unwrap(); let filename = "ty.schema.json"; let schema_path = PathBuf::from(ROOT_DIR).join(filename); From 8b53c95f3d753c0ab5703003f3e385c9ac59ebd4 Mon Sep 17 00:00:00 2001 From: Takayuki Maeda Date: Mon, 20 Oct 2025 15:38:10 +0900 Subject: [PATCH 3/4] preserve a comment --- crates/ruff_python_ast/src/name.rs | 30 +-- crates/ruff_python_ast/src/python_version.rs | 14 +- crates/ruff_workspace/src/options.rs | 9 +- crates/ty_project/src/metadata/options.rs | 108 ++++---- .../ty_python_semantic/src/python_platform.rs | 2 + ruff.schema.json | 239 +++++++++--------- ty.schema.json | 204 +++++++-------- 7 files changed, 293 insertions(+), 313 deletions(-) diff --git a/crates/ruff_python_ast/src/name.rs b/crates/ruff_python_ast/src/name.rs index 143aa6129ab17d..a4d8fe46c9dd35 100644 --- a/crates/ruff_python_ast/src/name.rs +++ b/crates/ruff_python_ast/src/name.rs @@ -11,6 +11,11 @@ use crate::generated::ExprName; #[cfg_attr(feature = "cache", derive(ruff_macros::CacheKey))] #[cfg_attr(feature = "salsa", derive(salsa::Update))] #[cfg_attr(feature = "get-size", derive(get_size2::GetSize))] +#[cfg_attr( + feature = "schemars", + derive(schemars::JsonSchema), + schemars(with = "String") +)] pub struct Name(compact_str::CompactString); impl Name { @@ -201,31 +206,6 @@ impl PartialEq for &String { } } -#[cfg(feature = "schemars")] -impl schemars::JsonSchema for Name { - fn schema_name() -> std::borrow::Cow<'static, str> { - ::schema_name() - } - - fn schema_id() -> std::borrow::Cow<'static, str> { - ::schema_id() - } - - fn json_schema(generator: &mut schemars::SchemaGenerator) -> schemars::Schema { - ::json_schema(generator) - } - - fn _schemars_private_non_optional_json_schema( - generator: &mut schemars::SchemaGenerator, - ) -> schemars::Schema { - ::_schemars_private_non_optional_json_schema(generator) - } - - fn _schemars_private_is_option() -> bool { - ::_schemars_private_is_option() - } -} - /// A representation of a qualified name, like `typing.List`. 
#[derive(Debug, Clone, PartialEq, Eq, Hash)] pub struct QualifiedName<'a>(SegmentsVec<'a>); diff --git a/crates/ruff_python_ast/src/python_version.rs b/crates/ruff_python_ast/src/python_version.rs index 71fb99f27a528d..6d472d9ab47558 100644 --- a/crates/ruff_python_ast/src/python_version.rs +++ b/crates/ruff_python_ast/src/python_version.rs @@ -197,13 +197,13 @@ mod schemars { } fn json_schema(_gen: &mut SchemaGenerator) -> Schema { - let string_with_pattern = schemars::json_schema!({ - "type": "string", - "pattern": r"^\\d+\\.\\d+$", - }); - - let mut any_of: Vec = Vec::new(); - any_of.push(string_with_pattern.into()); + let mut any_of: Vec = vec![ + schemars::json_schema!({ + "type": "string", + "pattern": r"^\d+\.\d+$", + }) + .into(), + ]; for version in Self::iter() { let mut schema = schemars::json_schema!({ diff --git a/crates/ruff_workspace/src/options.rs b/crates/ruff_workspace/src/options.rs index 83f6b4866efc81..5fd4cf0e626a94 100644 --- a/crates/ruff_workspace/src/options.rs +++ b/crates/ruff_workspace/src/options.rs @@ -3938,8 +3938,8 @@ pub struct LintOptionsWire { pyflakes: Option, pylint: Option, pyupgrade: Option, - per_file_ignores: Option>>, - extend_per_file_ignores: Option>>, + per_file_ignores: Option>>, + extend_per_file_ignores: Option>>, exclude: Option>, pydoclint: Option, @@ -4005,11 +4005,6 @@ impl From for LintOptions { future_annotations, } = value; - let per_file_ignores = - per_file_ignores.map(|map| map.into_iter().collect::>()); - let extend_per_file_ignores = - extend_per_file_ignores.map(|map| map.into_iter().collect::>()); - LintOptions { #[expect(deprecated)] common: LintCommonOptions { diff --git a/crates/ty_project/src/metadata/options.rs b/crates/ty_project/src/metadata/options.rs index 2245bfa16d4fbd..bcd821c53d141f 100644 --- a/crates/ty_project/src/metadata/options.rs +++ b/crates/ty_project/src/metadata/options.rs @@ -799,59 +799,6 @@ impl FromIterator<(RangedValue, RangedValue)> for Rules { } } -#[cfg(feature = "schemars")] -impl schemars::JsonSchema for Rules { - fn schema_name() -> std::borrow::Cow<'static, str> { - std::borrow::Cow::Borrowed("Rules") - } - - fn json_schema(generator: &mut schemars::SchemaGenerator) -> schemars::Schema { - use serde_json::{Map, Value}; - - let registry = ty_python_semantic::default_lint_registry(); - let level_schema = generator.subschema_for::(); - - let properties: Map = registry - .lints() - .iter() - .map(|lint| { - let mut schema = schemars::Schema::default(); - let object = schema.ensure_object(); - object.insert( - "title".to_string(), - Value::String(lint.summary().to_string()), - ); - object.insert( - "description".to_string(), - Value::String(lint.documentation()), - ); - if lint.status.is_deprecated() { - object.insert("deprecated".to_string(), Value::Bool(true)); - } - object.insert( - "default".to_string(), - Value::String(lint.default_level.to_string()), - ); - object.insert( - "oneOf".to_string(), - Value::Array(vec![level_schema.clone().into()]), - ); - - (lint.name().to_string(), schema.into()) - }) - .collect(); - - let mut schema = schemars::json_schema!({ "type": "object" }); - let object = schema.ensure_object(); - object.insert("properties".to_string(), Value::Object(properties)); - // Allow unknown rules: ty will warn about them. It gives a better experience when using an older - // ty version because the schema will not deny rules that have been removed in newer versions. 
- object.insert("additionalProperties".to_string(), level_schema.into()); - - schema - } -} - impl Rules { /// Convert the rules to a `RuleSelection` with diagnostics. pub fn to_rule_selection( @@ -1584,6 +1531,61 @@ impl Display for ToSettingsError { impl std::error::Error for ToSettingsError {} +#[cfg(feature = "schemars")] +mod schema { + impl schemars::JsonSchema for super::Rules { + fn schema_name() -> std::borrow::Cow<'static, str> { + std::borrow::Cow::Borrowed("Rules") + } + + fn json_schema(generator: &mut schemars::SchemaGenerator) -> schemars::Schema { + use serde_json::{Map, Value}; + + let registry = ty_python_semantic::default_lint_registry(); + let level_schema = generator.subschema_for::(); + + let properties: Map = registry + .lints() + .iter() + .map(|lint| { + let mut schema = schemars::Schema::default(); + let object = schema.ensure_object(); + object.insert( + "title".to_string(), + Value::String(lint.summary().to_string()), + ); + object.insert( + "description".to_string(), + Value::String(lint.documentation()), + ); + if lint.status.is_deprecated() { + object.insert("deprecated".to_string(), Value::Bool(true)); + } + object.insert( + "default".to_string(), + Value::String(lint.default_level.to_string()), + ); + object.insert( + "oneOf".to_string(), + Value::Array(vec![level_schema.clone().into()]), + ); + + (lint.name().to_string(), schema.into()) + }) + .collect(); + + let mut schema = schemars::json_schema!({ "type": "object" }); + let object = schema.ensure_object(); + object.insert("properties".to_string(), Value::Object(properties)); + // Allow unknown rules: ty will warn about them. It gives a better experience when using an older + // ty version because the schema will not deny rules that have been removed in newer versions. + object.insert("additionalProperties".to_string(), level_schema.into()); + + schema + } + } +} + #[derive(Error, Debug)] pub enum TyTomlError { #[error(transparent)] diff --git a/crates/ty_python_semantic/src/python_platform.rs b/crates/ty_python_semantic/src/python_platform.rs index 33c60d6a48692a..017cc4d21b60d9 100644 --- a/crates/ty_python_semantic/src/python_platform.rs +++ b/crates/ty_python_semantic/src/python_platform.rs @@ -77,6 +77,8 @@ mod schema { } let mut any_of = vec![schemars::json_schema!({ "type": "string" }).into()]; + // Promote well-known values for better auto-completion. + // Using `const` over `enumValues` as recommended [here](https://github.com/SchemaStore/schemastore/blob/master/CONTRIBUTING.md#documenting-enums). 
any_of.push(constant( "all", "Do not make any assumptions about the target platform.", diff --git a/ruff.schema.json b/ruff.schema.json index 74783d4b324cb1..1917af7f6d747b 100644 --- a/ruff.schema.json +++ b/ruff.schema.json @@ -1,5 +1,5 @@ { - "$schema": "https://json-schema.org/draft/2020-12/schema", + "$schema": "http://json-schema.org/draft-07/schema#", "title": "Options", "type": "object", "properties": { @@ -20,7 +20,7 @@ "description": "Options to configure import map generation.", "anyOf": [ { - "$ref": "#/$defs/AnalyzeOptions" + "$ref": "#/definitions/AnalyzeOptions" }, { "type": "null" @@ -95,7 +95,7 @@ ], "deprecated": true, "items": { - "$ref": "#/$defs/RuleSelector" + "$ref": "#/definitions/RuleSelector" } }, "extend-ignore": { @@ -106,7 +106,7 @@ ], "deprecated": true, "items": { - "$ref": "#/$defs/RuleSelector" + "$ref": "#/definitions/RuleSelector" } }, "extend-include": { @@ -128,7 +128,7 @@ "additionalProperties": { "type": "array", "items": { - "$ref": "#/$defs/RuleSelector" + "$ref": "#/definitions/RuleSelector" } }, "deprecated": true @@ -141,7 +141,7 @@ ], "deprecated": true, "items": { - "$ref": "#/$defs/RuleSelector" + "$ref": "#/definitions/RuleSelector" } }, "extend-select": { @@ -152,7 +152,7 @@ ], "deprecated": true, "items": { - "$ref": "#/$defs/RuleSelector" + "$ref": "#/definitions/RuleSelector" } }, "extend-unfixable": { @@ -163,7 +163,7 @@ ], "deprecated": true, "items": { - "$ref": "#/$defs/RuleSelector" + "$ref": "#/definitions/RuleSelector" } }, "extend-unsafe-fixes": { @@ -174,7 +174,7 @@ ], "deprecated": true, "items": { - "$ref": "#/$defs/RuleSelector" + "$ref": "#/definitions/RuleSelector" } }, "external": { @@ -210,14 +210,14 @@ ], "deprecated": true, "items": { - "$ref": "#/$defs/RuleSelector" + "$ref": "#/definitions/RuleSelector" } }, "flake8-annotations": { "description": "Options for the `flake8-annotations` plugin.", "anyOf": [ { - "$ref": "#/$defs/Flake8AnnotationsOptions" + "$ref": "#/definitions/Flake8AnnotationsOptions" }, { "type": "null" @@ -229,7 +229,7 @@ "description": "Options for the `flake8-bandit` plugin.", "anyOf": [ { - "$ref": "#/$defs/Flake8BanditOptions" + "$ref": "#/definitions/Flake8BanditOptions" }, { "type": "null" @@ -241,7 +241,7 @@ "description": "Options for the `flake8-boolean-trap` plugin.", "anyOf": [ { - "$ref": "#/$defs/Flake8BooleanTrapOptions" + "$ref": "#/definitions/Flake8BooleanTrapOptions" }, { "type": "null" @@ -253,7 +253,7 @@ "description": "Options for the `flake8-bugbear` plugin.", "anyOf": [ { - "$ref": "#/$defs/Flake8BugbearOptions" + "$ref": "#/definitions/Flake8BugbearOptions" }, { "type": "null" @@ -265,7 +265,7 @@ "description": "Options for the `flake8-builtins` plugin.", "anyOf": [ { - "$ref": "#/$defs/Flake8BuiltinsOptions" + "$ref": "#/definitions/Flake8BuiltinsOptions" }, { "type": "null" @@ -277,7 +277,7 @@ "description": "Options for the `flake8-comprehensions` plugin.", "anyOf": [ { - "$ref": "#/$defs/Flake8ComprehensionsOptions" + "$ref": "#/definitions/Flake8ComprehensionsOptions" }, { "type": "null" @@ -289,7 +289,7 @@ "description": "Options for the `flake8-copyright` plugin.", "anyOf": [ { - "$ref": "#/$defs/Flake8CopyrightOptions" + "$ref": "#/definitions/Flake8CopyrightOptions" }, { "type": "null" @@ -301,7 +301,7 @@ "description": "Options for the `flake8-errmsg` plugin.", "anyOf": [ { - "$ref": "#/$defs/Flake8ErrMsgOptions" + "$ref": "#/definitions/Flake8ErrMsgOptions" }, { "type": "null" @@ -313,7 +313,7 @@ "description": "Options for the `flake8-gettext` plugin.", "anyOf": [ { 
- "$ref": "#/$defs/Flake8GetTextOptions" + "$ref": "#/definitions/Flake8GetTextOptions" }, { "type": "null" @@ -325,7 +325,7 @@ "description": "Options for the `flake8-implicit-str-concat` plugin.", "anyOf": [ { - "$ref": "#/$defs/Flake8ImplicitStrConcatOptions" + "$ref": "#/definitions/Flake8ImplicitStrConcatOptions" }, { "type": "null" @@ -337,7 +337,7 @@ "description": "Options for the `flake8-import-conventions` plugin.", "anyOf": [ { - "$ref": "#/$defs/Flake8ImportConventionsOptions" + "$ref": "#/definitions/Flake8ImportConventionsOptions" }, { "type": "null" @@ -349,7 +349,7 @@ "description": "Options for the `flake8-pytest-style` plugin.", "anyOf": [ { - "$ref": "#/$defs/Flake8PytestStyleOptions" + "$ref": "#/definitions/Flake8PytestStyleOptions" }, { "type": "null" @@ -361,7 +361,7 @@ "description": "Options for the `flake8-quotes` plugin.", "anyOf": [ { - "$ref": "#/$defs/Flake8QuotesOptions" + "$ref": "#/definitions/Flake8QuotesOptions" }, { "type": "null" @@ -373,7 +373,7 @@ "description": "Options for the `flake8_self` plugin.", "anyOf": [ { - "$ref": "#/$defs/Flake8SelfOptions" + "$ref": "#/definitions/Flake8SelfOptions" }, { "type": "null" @@ -385,7 +385,7 @@ "description": "Options for the `flake8-tidy-imports` plugin.", "anyOf": [ { - "$ref": "#/$defs/Flake8TidyImportsOptions" + "$ref": "#/definitions/Flake8TidyImportsOptions" }, { "type": "null" @@ -397,7 +397,7 @@ "description": "Options for the `flake8-type-checking` plugin.", "anyOf": [ { - "$ref": "#/$defs/Flake8TypeCheckingOptions" + "$ref": "#/definitions/Flake8TypeCheckingOptions" }, { "type": "null" @@ -409,7 +409,7 @@ "description": "Options for the `flake8-unused-arguments` plugin.", "anyOf": [ { - "$ref": "#/$defs/Flake8UnusedArgumentsOptions" + "$ref": "#/definitions/Flake8UnusedArgumentsOptions" }, { "type": "null" @@ -428,7 +428,7 @@ "description": "Options to configure code formatting.", "anyOf": [ { - "$ref": "#/$defs/FormatOptions" + "$ref": "#/definitions/FormatOptions" }, { "type": "null" @@ -443,7 +443,7 @@ ], "deprecated": true, "items": { - "$ref": "#/$defs/RuleSelector" + "$ref": "#/definitions/RuleSelector" } }, "ignore-init-module-imports": { @@ -468,7 +468,7 @@ "description": "The number of spaces per indentation level (tab).\n\nUsed by the formatter and when enforcing long-line violations (like `E501`) to determine the visual\nwidth of a tab.\n\nThis option changes the number of spaces the formatter inserts when\nusing soft-tabs (`indent-style = space`).\n\nPEP 8 recommends using 4 spaces per [indentation level](https://peps.python.org/pep-0008/#indentation).", "anyOf": [ { - "$ref": "#/$defs/IndentWidth" + "$ref": "#/definitions/IndentWidth" }, { "type": "null" @@ -479,7 +479,7 @@ "description": "Options for the `isort` plugin.", "anyOf": [ { - "$ref": "#/$defs/IsortOptions" + "$ref": "#/definitions/IsortOptions" }, { "type": "null" @@ -491,7 +491,7 @@ "description": "The line length to use when enforcing long-lines violations (like `E501`)\nand at which `isort` and the formatter prefers to wrap lines.\n\nThe length is determined by the number of characters per line, except for lines containing East Asian characters or emojis.\nFor these lines, the [unicode width](https://unicode.org/reports/tr11/) of each character is added up to determine the length.\n\nThe value must be greater than `0` and less than or equal to `320`.\n\nNote: While the formatter will attempt to format lines such that they remain\nwithin the `line-length`, it isn't a hard upper bound, and formatted lines may\nexceed the 
`line-length`.\n\nSee [`pycodestyle.max-line-length`](#lint_pycodestyle_max-line-length) to configure different lengths for `E501` and the formatter.", "anyOf": [ { - "$ref": "#/$defs/LineLength" + "$ref": "#/definitions/LineLength" }, { "type": "null" @@ -501,7 +501,7 @@ "lint": { "anyOf": [ { - "$ref": "#/$defs/LintOptions" + "$ref": "#/definitions/LintOptions" }, { "type": "null" @@ -523,7 +523,7 @@ "description": "Options for the `mccabe` plugin.", "anyOf": [ { - "$ref": "#/$defs/McCabeOptions" + "$ref": "#/definitions/McCabeOptions" }, { "type": "null" @@ -545,7 +545,7 @@ "description": "The style in which violation messages should be formatted: `\"full\"` (default)\n(shows source), `\"concise\"`, `\"grouped\"` (group messages by file), `\"json\"`\n(machine-readable), `\"junit\"` (machine-readable XML), `\"github\"` (GitHub\nActions annotations), `\"gitlab\"` (GitLab CI code quality report),\n`\"pylint\"` (Pylint text format) or `\"azure\"` (Azure Pipeline logging commands).", "anyOf": [ { - "$ref": "#/$defs/OutputFormat" + "$ref": "#/definitions/OutputFormat" }, { "type": "null" @@ -556,7 +556,7 @@ "description": "Options for the `pep8-naming` plugin.", "anyOf": [ { - "$ref": "#/$defs/Pep8NamingOptions" + "$ref": "#/definitions/Pep8NamingOptions" }, { "type": "null" @@ -573,7 +573,7 @@ "additionalProperties": { "type": "array", "items": { - "$ref": "#/$defs/RuleSelector" + "$ref": "#/definitions/RuleSelector" } }, "deprecated": true @@ -585,7 +585,7 @@ "null" ], "additionalProperties": { - "$ref": "#/$defs/PythonVersion" + "$ref": "#/definitions/PythonVersion" } }, "preview": { @@ -599,7 +599,7 @@ "description": "Options for the `pycodestyle` plugin.", "anyOf": [ { - "$ref": "#/$defs/PycodestyleOptions" + "$ref": "#/definitions/PycodestyleOptions" }, { "type": "null" @@ -611,7 +611,7 @@ "description": "Options for the `pydocstyle` plugin.", "anyOf": [ { - "$ref": "#/$defs/PydocstyleOptions" + "$ref": "#/definitions/PydocstyleOptions" }, { "type": "null" @@ -623,7 +623,7 @@ "description": "Options for the `pyflakes` plugin.", "anyOf": [ { - "$ref": "#/$defs/PyflakesOptions" + "$ref": "#/definitions/PyflakesOptions" }, { "type": "null" @@ -635,7 +635,7 @@ "description": "Options for the `pylint` plugin.", "anyOf": [ { - "$ref": "#/$defs/PylintOptions" + "$ref": "#/definitions/PylintOptions" }, { "type": "null" @@ -647,7 +647,7 @@ "description": "Options for the `pyupgrade` plugin.", "anyOf": [ { - "$ref": "#/$defs/PyUpgradeOptions" + "$ref": "#/definitions/PyUpgradeOptions" }, { "type": "null" @@ -659,7 +659,7 @@ "description": "Enforce a requirement on the version of Ruff, to enforce at runtime.\nIf the version of Ruff does not meet the requirement, Ruff will exit\nwith an error.\n\nUseful for unifying results across many environments, e.g., with a\n`pyproject.toml` file.\n\nAccepts a [PEP 440](https://peps.python.org/pep-0440/) specifier, like `==0.3.1` or `>=0.3.1`.", "anyOf": [ { - "$ref": "#/$defs/RequiredVersion" + "$ref": "#/definitions/RequiredVersion" }, { "type": "null" @@ -681,7 +681,7 @@ ], "deprecated": true, "items": { - "$ref": "#/$defs/RuleSelector" + "$ref": "#/definitions/RuleSelector" } }, "show-fixes": { @@ -705,7 +705,7 @@ "description": "The minimum Python version to target, e.g., when considering automatic\ncode upgrades, like rewriting type annotations. 
Ruff will not propose\nchanges using features that are not available in the given version.\n\nFor example, to represent supporting Python >=3.11 or ==3.11\nspecify `target-version = \"py311\"`.\n\nIf you're already using a `pyproject.toml` file, we recommend\n`project.requires-python` instead, as it's based on Python packaging\nstandards, and will be respected by other tools. For example, Ruff\ntreats the following as identical to `target-version = \"py38\"`:\n\n```toml\n[project]\nrequires-python = \">=3.8\"\n```\n\nIf both are specified, `target-version` takes precedence over\n`requires-python`. See [_Inferring the Python version_](https://docs.astral.sh/ruff/configuration/#inferring-the-python-version)\nfor a complete description of how the `target-version` is determined\nwhen left unspecified.\n\nNote that a stub file can [sometimes make use of a typing feature](https://typing.python.org/en/latest/spec/distributing.html#syntax)\nbefore it is available at runtime, as long as the stub does not make\nuse of new *syntax*. For example, a type checker will understand\n`int | str` in a stub as being a `Union` type annotation, even if the\ntype checker is run using Python 3.9, despite the fact that the `|`\noperator can only be used to create union types at runtime on Python\n3.10+. As such, Ruff will often recommend newer features in a stub\nfile than it would for an equivalent runtime file with the same target\nversion.", "anyOf": [ { - "$ref": "#/$defs/PythonVersion" + "$ref": "#/definitions/PythonVersion" }, { "type": "null" @@ -742,7 +742,7 @@ ], "deprecated": true, "items": { - "$ref": "#/$defs/RuleSelector" + "$ref": "#/definitions/RuleSelector" } }, "unsafe-fixes": { @@ -754,7 +754,7 @@ } }, "additionalProperties": false, - "$defs": { + "definitions": { "Alias": { "type": "string" }, @@ -773,7 +773,7 @@ "description": "Whether to generate a map from file to files that it depends on (dependencies) or files that\ndepend on it (dependents).", "anyOf": [ { - "$ref": "#/$defs/Direction" + "$ref": "#/definitions/Direction" }, { "type": "null" @@ -890,7 +890,7 @@ "description": "Wrap docstring code examples at a fixed line width.", "oneOf": [ { - "$ref": "#/$defs/LineWidth" + "$ref": "#/definitions/LineWidth" } ] }, @@ -1165,7 +1165,7 @@ "null" ], "items": { - "$ref": "#/$defs/string" + "type": "string" } }, "function-names": { @@ -1175,7 +1175,7 @@ "null" ], "items": { - "$ref": "#/$defs/string" + "type": "string" } } }, @@ -1206,7 +1206,7 @@ "null" ], "additionalProperties": { - "$ref": "#/$defs/Alias" + "$ref": "#/definitions/Alias" } }, "banned-aliases": { @@ -1216,7 +1216,7 @@ "null" ], "additionalProperties": { - "$ref": "#/$defs/BannedAliases" + "$ref": "#/definitions/BannedAliases" } }, "banned-from": { @@ -1237,7 +1237,7 @@ "null" ], "additionalProperties": { - "$ref": "#/$defs/Alias" + "$ref": "#/definitions/Alias" } } }, @@ -1265,7 +1265,7 @@ "description": "Expected type for multiple argument names in `@pytest.mark.parametrize`.\nThe following values are supported:\n\n- `csv` — a comma-separated list, e.g.\n `@pytest.mark.parametrize(\"name1,name2\", ...)`\n- `tuple` (default) — e.g.\n `@pytest.mark.parametrize((\"name1\", \"name2\"), ...)`\n- `list` — e.g. `@pytest.mark.parametrize([\"name1\", \"name2\"], ...)`", "anyOf": [ { - "$ref": "#/$defs/ParametrizeNameType" + "$ref": "#/definitions/ParametrizeNameType" }, { "type": "null" @@ -1276,7 +1276,7 @@ "description": "Expected type for each row of values in `@pytest.mark.parametrize` in\ncase of multiple parameters. 
The following values are supported:\n\n- `tuple` (default) — e.g.\n `@pytest.mark.parametrize((\"name1\", \"name2\"), [(1, 2), (3, 4)])`\n- `list` — e.g.\n `@pytest.mark.parametrize((\"name1\", \"name2\"), [[1, 2], [3, 4]])`", "anyOf": [ { - "$ref": "#/$defs/ParametrizeValuesRowType" + "$ref": "#/definitions/ParametrizeValuesRowType" }, { "type": "null" @@ -1287,7 +1287,7 @@ "description": "Expected type for the list of values rows in `@pytest.mark.parametrize`.\nThe following values are supported:\n\n- `tuple` — e.g. `@pytest.mark.parametrize(\"name\", (1, 2, 3))`\n- `list` (default) — e.g. `@pytest.mark.parametrize(\"name\", [1, 2, 3])`", "anyOf": [ { - "$ref": "#/$defs/ParametrizeValuesType" + "$ref": "#/definitions/ParametrizeValuesType" }, { "type": "null" @@ -1352,7 +1352,7 @@ "description": "Quote style to prefer for docstrings (either \"single\" or \"double\").\n\nWhen using the formatter, only \"double\" is compatible, as the formatter\nenforces double quotes for docstrings strings.", "anyOf": [ { - "$ref": "#/$defs/Quote" + "$ref": "#/definitions/Quote" }, { "type": "null" @@ -1363,7 +1363,7 @@ "description": "Quote style to prefer for inline strings (either \"single\" or\n\"double\").\n\nWhen using the formatter, ensure that [`format.quote-style`](#format_quote-style) is set to\nthe same preferred quote style.", "anyOf": [ { - "$ref": "#/$defs/Quote" + "$ref": "#/definitions/Quote" }, { "type": "null" @@ -1374,7 +1374,7 @@ "description": "Quote style to prefer for multiline strings (either \"single\" or\n\"double\").\n\nWhen using the formatter, only \"double\" is compatible, as the formatter\nenforces double quotes for multiline strings.", "anyOf": [ { - "$ref": "#/$defs/Quote" + "$ref": "#/definitions/Quote" }, { "type": "null" @@ -1395,7 +1395,7 @@ "null" ], "items": { - "$ref": "#/$defs/string" + "type": "string" } }, "ignore-names": { @@ -1405,7 +1405,7 @@ "null" ], "items": { - "$ref": "#/$defs/string" + "type": "string" } } }, @@ -1419,7 +1419,7 @@ "description": "Whether to ban all relative imports (`\"all\"`), or only those imports\nthat extend into the parent module or beyond (`\"parents\"`).", "anyOf": [ { - "$ref": "#/$defs/Strictness" + "$ref": "#/definitions/Strictness" }, { "type": "null" @@ -1433,7 +1433,7 @@ "null" ], "additionalProperties": { - "$ref": "#/$defs/ApiBan" + "$ref": "#/definitions/ApiBan" } }, "banned-module-level-imports": { @@ -1529,7 +1529,7 @@ "description": "Set the line length used when formatting code snippets in docstrings.\n\nThis only has an effect when the `docstring-code-format` setting is\nenabled.\n\nThe default value for this setting is `\"dynamic\"`, which has the effect\nof ensuring that any reformatted code examples in docstrings adhere to\nthe global line length configuration that is used for the surrounding\nPython code. The point of this setting is that it takes the indentation\nof the docstring into account when reformatting code examples.\n\nAlternatively, this can be set to a fixed integer, which will result\nin the same line length limit being applied to all reformatted code\nexamples in docstrings. When set to a fixed integer, the indent of the\ndocstring is not taken into account. That is, this may result in lines\nin the reformatted code example that exceed the globally configured\nline length limit.\n\nFor example, when this is set to `20` and [`docstring-code-format`](#docstring-code-format)\nis enabled, then this code:\n\n```python\ndef f(x):\n '''\n Something about `f`. And an example:\n\n .. 
code-block:: python\n\n foo, bar, quux = this_is_a_long_line(lion, hippo, lemur, bear)\n '''\n pass\n```\n\n... will be reformatted (assuming the rest of the options are set\nto their defaults) as:\n\n```python\ndef f(x):\n \"\"\"\n Something about `f`. And an example:\n\n .. code-block:: python\n\n (\n foo,\n bar,\n quux,\n ) = this_is_a_long_line(\n lion,\n hippo,\n lemur,\n bear,\n )\n \"\"\"\n pass\n```", "anyOf": [ { - "$ref": "#/$defs/DocstringCodeLineWidth" + "$ref": "#/definitions/DocstringCodeLineWidth" }, { "type": "null" @@ -1550,7 +1550,7 @@ "description": "Whether to use spaces or tabs for indentation.\n\n`indent-style = \"space\"` (default):\n\n```python\ndef f():\n print(\"Hello\") # Spaces indent the `print` statement.\n```\n\n`indent-style = \"tab\"`:\n\n```python\ndef f():\n print(\"Hello\") # A tab `\\t` indents the `print` statement.\n```\n\nPEP 8 recommends using spaces for [indentation](https://peps.python.org/pep-0008/#indentation).\nWe care about accessibility; if you do not need tabs for accessibility, we do not recommend you use them.\n\nSee [`indent-width`](#indent-width) to configure the number of spaces per indentation and the tab width.", "anyOf": [ { - "$ref": "#/$defs/IndentStyle" + "$ref": "#/definitions/IndentStyle" }, { "type": "null" @@ -1561,7 +1561,7 @@ "description": "The character Ruff uses at the end of a line.\n\n* `auto`: The newline style is detected automatically on a file per file basis. Files with mixed line endings will be converted to the first detected line ending. Defaults to `\\n` for files that contain no line endings.\n* `lf`: Line endings will be converted to `\\n`. The default line ending on Unix.\n* `cr-lf`: Line endings will be converted to `\\r\\n`. The default line ending on Windows.\n* `native`: Line endings will be converted to `\\n` on Unix and `\\r\\n` on Windows.", "anyOf": [ { - "$ref": "#/$defs/LineEnding" + "$ref": "#/definitions/LineEnding" }, { "type": "null" @@ -1579,7 +1579,7 @@ "description": "Configures the preferred quote character for strings. 
The recommended options are\n\n* `double` (default): Use double quotes `\"`\n* `single`: Use single quotes `'`\n\nIn compliance with [PEP 8](https://peps.python.org/pep-0008/) and [PEP 257](https://peps.python.org/pep-0257/),\nRuff prefers double quotes for triple quoted strings and docstrings even when using `quote-style = \"single\"`.\n\nRuff deviates from using the configured quotes if doing so prevents the need for\nescaping quote characters inside the string:\n\n```python\na = \"a string without any quotes\"\nb = \"It's monday morning\"\n```\n\nRuff will change the quotes of the string assigned to `a` to single quotes when using `quote-style = \"single\"`.\nHowever, Ruff uses double quotes for the string assigned to `b` because using single quotes would require escaping the `'`,\nwhich leads to the less readable code: `'It\\'s monday morning'`.\n\nIn addition, Ruff supports the quote style `preserve` for projects that already use\na mixture of single and double quotes and can't migrate to the `double` or `single` style.\nThe quote style `preserve` leaves the quotes of all strings unchanged.", "anyOf": [ { - "$ref": "#/$defs/QuoteStyle" + "$ref": "#/definitions/QuoteStyle" }, { "type": "null" @@ -1599,7 +1599,7 @@ "ImportSection": { "anyOf": [ { - "$ref": "#/$defs/ImportType" + "$ref": "#/definitions/ImportType" }, { "type": "string" @@ -1679,7 +1679,7 @@ "description": "Define a default section for any imports that don't fit into the specified [`section-order`](#lint_isort_section-order).", "anyOf": [ { - "$ref": "#/$defs/ImportSection" + "$ref": "#/definitions/ImportSection" }, { "type": "null" @@ -1819,7 +1819,7 @@ "null" ], "items": { - "$ref": "#/$defs/ImportSection" + "$ref": "#/definitions/ImportSection" } }, "no-sections": { @@ -1840,7 +1840,7 @@ "description": "Whether to place \"closer\" imports (fewer `.` characters, most local)\nbefore \"further\" imports (more `.` characters, least local), or vice\nversa.\n\nThe default (\"furthest-to-closest\") is equivalent to isort's\n[`reverse-relative`](https://pycqa.github.io/isort/docs/configuration/options.html#reverse-relative) default (`reverse-relative = false`); setting\nthis to \"closest-to-furthest\" is equivalent to isort's\n`reverse-relative = true`.", "anyOf": [ { - "$ref": "#/$defs/RelativeImportsOrder" + "$ref": "#/definitions/RelativeImportsOrder" }, { "type": "null" @@ -1854,7 +1854,7 @@ "null" ], "items": { - "$ref": "#/$defs/NameImports" + "$ref": "#/definitions/NameImports" } }, "section-order": { @@ -1864,7 +1864,7 @@ "null" ], "items": { - "$ref": "#/$defs/ImportSection" + "$ref": "#/definitions/ImportSection" } }, "sections": { @@ -1995,7 +1995,7 @@ "null" ], "items": { - "$ref": "#/$defs/RuleSelector" + "$ref": "#/definitions/RuleSelector" } }, "extend-ignore": { @@ -2006,7 +2006,7 @@ ], "deprecated": true, "items": { - "$ref": "#/$defs/RuleSelector" + "$ref": "#/definitions/RuleSelector" } }, "extend-per-file-ignores": { @@ -2018,7 +2018,7 @@ "additionalProperties": { "type": "array", "items": { - "$ref": "#/$defs/RuleSelector" + "$ref": "#/definitions/RuleSelector" } } }, @@ -2029,7 +2029,7 @@ "null" ], "items": { - "$ref": "#/$defs/RuleSelector" + "$ref": "#/definitions/RuleSelector" } }, "extend-select": { @@ -2039,7 +2039,7 @@ "null" ], "items": { - "$ref": "#/$defs/RuleSelector" + "$ref": "#/definitions/RuleSelector" } }, "extend-unfixable": { @@ -2050,7 +2050,7 @@ ], "deprecated": true, "items": { - "$ref": "#/$defs/RuleSelector" + "$ref": "#/definitions/RuleSelector" } }, "extend-unsafe-fixes": { @@ 
-2060,7 +2060,7 @@ "null" ], "items": { - "$ref": "#/$defs/RuleSelector" + "$ref": "#/definitions/RuleSelector" } }, "external": { @@ -2080,14 +2080,14 @@ "null" ], "items": { - "$ref": "#/$defs/RuleSelector" + "$ref": "#/definitions/RuleSelector" } }, "flake8-annotations": { "description": "Options for the `flake8-annotations` plugin.", "anyOf": [ { - "$ref": "#/$defs/Flake8AnnotationsOptions" + "$ref": "#/definitions/Flake8AnnotationsOptions" }, { "type": "null" @@ -2098,7 +2098,7 @@ "description": "Options for the `flake8-bandit` plugin.", "anyOf": [ { - "$ref": "#/$defs/Flake8BanditOptions" + "$ref": "#/definitions/Flake8BanditOptions" }, { "type": "null" @@ -2109,7 +2109,7 @@ "description": "Options for the `flake8-boolean-trap` plugin.", "anyOf": [ { - "$ref": "#/$defs/Flake8BooleanTrapOptions" + "$ref": "#/definitions/Flake8BooleanTrapOptions" }, { "type": "null" @@ -2120,7 +2120,7 @@ "description": "Options for the `flake8-bugbear` plugin.", "anyOf": [ { - "$ref": "#/$defs/Flake8BugbearOptions" + "$ref": "#/definitions/Flake8BugbearOptions" }, { "type": "null" @@ -2131,7 +2131,7 @@ "description": "Options for the `flake8-builtins` plugin.", "anyOf": [ { - "$ref": "#/$defs/Flake8BuiltinsOptions" + "$ref": "#/definitions/Flake8BuiltinsOptions" }, { "type": "null" @@ -2142,7 +2142,7 @@ "description": "Options for the `flake8-comprehensions` plugin.", "anyOf": [ { - "$ref": "#/$defs/Flake8ComprehensionsOptions" + "$ref": "#/definitions/Flake8ComprehensionsOptions" }, { "type": "null" @@ -2153,7 +2153,7 @@ "description": "Options for the `flake8-copyright` plugin.", "anyOf": [ { - "$ref": "#/$defs/Flake8CopyrightOptions" + "$ref": "#/definitions/Flake8CopyrightOptions" }, { "type": "null" @@ -2164,7 +2164,7 @@ "description": "Options for the `flake8-errmsg` plugin.", "anyOf": [ { - "$ref": "#/$defs/Flake8ErrMsgOptions" + "$ref": "#/definitions/Flake8ErrMsgOptions" }, { "type": "null" @@ -2175,7 +2175,7 @@ "description": "Options for the `flake8-gettext` plugin.", "anyOf": [ { - "$ref": "#/$defs/Flake8GetTextOptions" + "$ref": "#/definitions/Flake8GetTextOptions" }, { "type": "null" @@ -2186,7 +2186,7 @@ "description": "Options for the `flake8-implicit-str-concat` plugin.", "anyOf": [ { - "$ref": "#/$defs/Flake8ImplicitStrConcatOptions" + "$ref": "#/definitions/Flake8ImplicitStrConcatOptions" }, { "type": "null" @@ -2197,7 +2197,7 @@ "description": "Options for the `flake8-import-conventions` plugin.", "anyOf": [ { - "$ref": "#/$defs/Flake8ImportConventionsOptions" + "$ref": "#/definitions/Flake8ImportConventionsOptions" }, { "type": "null" @@ -2208,7 +2208,7 @@ "description": "Options for the `flake8-pytest-style` plugin.", "anyOf": [ { - "$ref": "#/$defs/Flake8PytestStyleOptions" + "$ref": "#/definitions/Flake8PytestStyleOptions" }, { "type": "null" @@ -2219,7 +2219,7 @@ "description": "Options for the `flake8-quotes` plugin.", "anyOf": [ { - "$ref": "#/$defs/Flake8QuotesOptions" + "$ref": "#/definitions/Flake8QuotesOptions" }, { "type": "null" @@ -2230,7 +2230,7 @@ "description": "Options for the `flake8_self` plugin.", "anyOf": [ { - "$ref": "#/$defs/Flake8SelfOptions" + "$ref": "#/definitions/Flake8SelfOptions" }, { "type": "null" @@ -2241,7 +2241,7 @@ "description": "Options for the `flake8-tidy-imports` plugin.", "anyOf": [ { - "$ref": "#/$defs/Flake8TidyImportsOptions" + "$ref": "#/definitions/Flake8TidyImportsOptions" }, { "type": "null" @@ -2252,7 +2252,7 @@ "description": "Options for the `flake8-type-checking` plugin.", "anyOf": [ { - "$ref": 
"#/$defs/Flake8TypeCheckingOptions" + "$ref": "#/definitions/Flake8TypeCheckingOptions" }, { "type": "null" @@ -2263,7 +2263,7 @@ "description": "Options for the `flake8-unused-arguments` plugin.", "anyOf": [ { - "$ref": "#/$defs/Flake8UnusedArgumentsOptions" + "$ref": "#/definitions/Flake8UnusedArgumentsOptions" }, { "type": "null" @@ -2284,7 +2284,7 @@ "null" ], "items": { - "$ref": "#/$defs/RuleSelector" + "$ref": "#/definitions/RuleSelector" } }, "ignore-init-module-imports": { @@ -2299,7 +2299,7 @@ "description": "Options for the `isort` plugin.", "anyOf": [ { - "$ref": "#/$defs/IsortOptions" + "$ref": "#/definitions/IsortOptions" }, { "type": "null" @@ -2320,7 +2320,7 @@ "description": "Options for the `mccabe` plugin.", "anyOf": [ { - "$ref": "#/$defs/McCabeOptions" + "$ref": "#/definitions/McCabeOptions" }, { "type": "null" @@ -2331,7 +2331,7 @@ "description": "Options for the `pep8-naming` plugin.", "anyOf": [ { - "$ref": "#/$defs/Pep8NamingOptions" + "$ref": "#/definitions/Pep8NamingOptions" }, { "type": "null" @@ -2347,7 +2347,7 @@ "additionalProperties": { "type": "array", "items": { - "$ref": "#/$defs/RuleSelector" + "$ref": "#/definitions/RuleSelector" } } }, @@ -2362,7 +2362,7 @@ "description": "Options for the `pycodestyle` plugin.", "anyOf": [ { - "$ref": "#/$defs/PycodestyleOptions" + "$ref": "#/definitions/PycodestyleOptions" }, { "type": "null" @@ -2373,7 +2373,7 @@ "description": "Options for the `pydoclint` plugin.", "anyOf": [ { - "$ref": "#/$defs/PydoclintOptions" + "$ref": "#/definitions/PydoclintOptions" }, { "type": "null" @@ -2384,7 +2384,7 @@ "description": "Options for the `pydocstyle` plugin.", "anyOf": [ { - "$ref": "#/$defs/PydocstyleOptions" + "$ref": "#/definitions/PydocstyleOptions" }, { "type": "null" @@ -2395,7 +2395,7 @@ "description": "Options for the `pyflakes` plugin.", "anyOf": [ { - "$ref": "#/$defs/PyflakesOptions" + "$ref": "#/definitions/PyflakesOptions" }, { "type": "null" @@ -2406,7 +2406,7 @@ "description": "Options for the `pylint` plugin.", "anyOf": [ { - "$ref": "#/$defs/PylintOptions" + "$ref": "#/definitions/PylintOptions" }, { "type": "null" @@ -2417,7 +2417,7 @@ "description": "Options for the `pyupgrade` plugin.", "anyOf": [ { - "$ref": "#/$defs/PyUpgradeOptions" + "$ref": "#/definitions/PyUpgradeOptions" }, { "type": "null" @@ -2428,7 +2428,7 @@ "description": "Options for the `ruff` plugin", "anyOf": [ { - "$ref": "#/$defs/RuffOptions" + "$ref": "#/definitions/RuffOptions" }, { "type": "null" @@ -2442,7 +2442,7 @@ "null" ], "items": { - "$ref": "#/$defs/RuleSelector" + "$ref": "#/definitions/RuleSelector" } }, "task-tags": { @@ -2479,7 +2479,7 @@ "null" ], "items": { - "$ref": "#/$defs/RuleSelector" + "$ref": "#/definitions/RuleSelector" } } }, @@ -2619,7 +2619,7 @@ "description": "The maximum line length to allow for [`doc-line-too-long`](https://docs.astral.sh/ruff/rules/doc-line-too-long/) violations within\ndocumentation (`W505`), including standalone comments. 
By default,\nthis is set to `null` which disables reporting violations.\n\nThe length is determined by the number of characters per line, except for lines containing Asian characters or emojis.\nFor these lines, the [unicode width](https://unicode.org/reports/tr11/) of each character is added up to determine the length.\n\nSee the [`doc-line-too-long`](https://docs.astral.sh/ruff/rules/doc-line-too-long/) rule for more information.", "anyOf": [ { - "$ref": "#/$defs/LineLength" + "$ref": "#/definitions/LineLength" }, { "type": "null" @@ -2630,7 +2630,7 @@ "description": "The maximum line length to allow for [`line-too-long`](https://docs.astral.sh/ruff/rules/line-too-long/) violations. By default,\nthis is set to the value of the [`line-length`](#line-length) option.\n\nUse this option when you want to detect extra-long lines that the formatter can't automatically split by setting\n`pycodestyle.line-length` to a value larger than [`line-length`](#line-length).\n\n```toml\n# The formatter wraps lines at a length of 88.\nline-length = 88\n\n[pycodestyle]\n# E501 reports lines that exceed the length of 100.\nmax-line-length = 100\n```\n\nThe length is determined by the number of characters per line, except for lines containing East Asian characters or emojis.\nFor these lines, the [unicode width](https://unicode.org/reports/tr11/) of each character is added up to determine the length.\n\nSee the [`line-too-long`](https://docs.astral.sh/ruff/rules/line-too-long/) rule for more information.", "anyOf": [ { - "$ref": "#/$defs/LineLength" + "$ref": "#/definitions/LineLength" }, { "type": "null" @@ -2662,7 +2662,7 @@ "description": "Whether to use Google-style, NumPy-style conventions, or the [PEP 257](https://peps.python.org/pep-0257/)\ndefaults when analyzing docstring sections.\n\nEnabling a convention will disable all rules that are not included in\nthe specified convention. 
As such, the intended workflow is to enable a\nconvention and then selectively enable or disable any additional rules\non top of it.\n\nFor example, to use Google-style conventions but avoid requiring\ndocumentation for every function parameter:\n\n```toml\n[tool.ruff.lint]\n# Enable all `pydocstyle` rules, limiting to those that adhere to the\n# Google convention via `convention = \"google\"`, below.\nselect = [\"D\"]\n\n# On top of the Google convention, disable `D417`, which requires\n# documentation for every function parameter.\nignore = [\"D417\"]\n\n[tool.ruff.lint.pydocstyle]\nconvention = \"google\"\n```\n\nTo enable an additional rule that's excluded from the convention,\nselect the desired rule via its fully qualified rule code (e.g.,\n`D400` instead of `D4` or `D40`):\n\n```toml\n[tool.ruff.lint]\n# Enable D400 on top of the Google convention.\nextend-select = [\"D400\"]\n\n[tool.ruff.lint.pydocstyle]\nconvention = \"google\"\n```", "anyOf": [ { - "$ref": "#/$defs/Convention" + "$ref": "#/definitions/Convention" }, { "type": "null" @@ -2748,7 +2748,7 @@ "null" ], "items": { - "$ref": "#/$defs/ConstantType" + "$ref": "#/definitions/ConstantType" } }, "max-args": { @@ -4348,9 +4348,6 @@ "const": "all" } ] - }, - "string": { - "type": "string" } } } \ No newline at end of file diff --git a/ty.schema.json b/ty.schema.json index 59c62f6a1ae702..270241fb287cf9 100644 --- a/ty.schema.json +++ b/ty.schema.json @@ -1,5 +1,5 @@ { - "$schema": "https://json-schema.org/draft/2020-12/schema", + "$schema": "http://json-schema.org/draft-07/schema#", "title": "Options", "type": "object", "properties": { @@ -7,7 +7,7 @@ "description": "Configures the type checking environment.", "anyOf": [ { - "$ref": "#/$defs/EnvironmentOptions" + "$ref": "#/definitions/EnvironmentOptions" }, { "type": "null" @@ -18,7 +18,7 @@ "description": "Override configurations for specific file patterns.\n\nEach override specifies include/exclude patterns and rule configurations\nthat apply to matching files. 
Multiple overrides can match the same file,\nwith later overrides taking precedence.", "anyOf": [ { - "$ref": "#/$defs/OverridesOptions" + "$ref": "#/definitions/OverridesOptions" }, { "type": "null" @@ -29,7 +29,7 @@ "description": "Configures the enabled rules and their severity.\n\nSee [the rules documentation](https://ty.dev/rules) for a list of all available rules.\n\nValid severities are:\n\n* `ignore`: Disable the rule.\n* `warn`: Enable the rule and create a warning diagnostic.\n* `error`: Enable the rule and create an error diagnostic.\n ty will exit with a non-zero code if any error diagnostics are emitted.", "anyOf": [ { - "$ref": "#/$defs/Rules" + "$ref": "#/definitions/Rules" }, { "type": "null" @@ -39,7 +39,7 @@ "src": { "anyOf": [ { - "$ref": "#/$defs/SrcOptions" + "$ref": "#/definitions/SrcOptions" }, { "type": "null" @@ -49,7 +49,7 @@ "terminal": { "anyOf": [ { - "$ref": "#/$defs/TerminalOptions" + "$ref": "#/definitions/TerminalOptions" }, { "type": "null" @@ -58,11 +58,11 @@ } }, "additionalProperties": false, - "$defs": { + "definitions": { "Array_of_string": { "type": "array", "items": { - "$ref": "#/$defs/string" + "$ref": "#/definitions/string" } }, "EnvironmentOptions": { @@ -75,14 +75,14 @@ "null" ], "items": { - "$ref": "#/$defs/RelativePathBuf" + "$ref": "#/definitions/RelativePathBuf" } }, "python": { "description": "Path to your project's Python environment or interpreter.\n\nty uses the `site-packages` directory of your project's Python environment\nto resolve third-party (and, in some cases, first-party) imports in your code.\n\nIf you're using a project management tool such as uv, you should not generally need\nto specify this option, as commands such as `uv run` will set the `VIRTUAL_ENV`\nenvironment variable to point to your project's virtual environment. ty can also infer\nthe location of your environment from an activated Conda environment, and will look for\na `.venv` directory in the project root if none of the above apply.\n\nPassing a path to a Python executable is supported, but passing a path to a dynamic executable\n(such as a shim) is not currently supported.\n\nThis option can be used to point to virtual or system Python environments.", "anyOf": [ { - "$ref": "#/$defs/RelativePathBuf" + "$ref": "#/definitions/RelativePathBuf" }, { "type": "null" @@ -93,7 +93,7 @@ "description": "Specifies the target platform that will be used to analyze the source code.\nIf specified, ty will understand conditions based on comparisons with `sys.platform`, such\nas are commonly found in typeshed to reflect the differing contents of the standard library across platforms.\nIf `all` is specified, ty will assume that the source code can run on any platform.\n\nIf no platform is specified, ty will use the current platform:\n- `win32` for Windows\n- `darwin` for macOS\n- `android` for Android\n- `ios` for iOS\n- `linux` for everything else", "anyOf": [ { - "$ref": "#/$defs/PythonPlatform" + "$ref": "#/definitions/PythonPlatform" }, { "type": "null" @@ -104,7 +104,7 @@ "description": "Specifies the version of Python that will be used to analyze the source code.\nThe version should be specified as a string in the format `M.m` where `M` is the major version\nand `m` is the minor (e.g. `\"3.0\"` or `\"3.6\"`).\nIf a version is provided, ty will generate errors if the source code makes use of language features\nthat are not supported in that version.\n\nIf a version is not specified, ty will try the following techniques in order of preference\nto determine a value:\n1. 
Check for the `project.requires-python` setting in a `pyproject.toml` file\n and use the minimum version from the specified range\n2. Check for an activated or configured Python environment\n and attempt to infer the Python version of that environment\n3. Fall back to the default value (see below)\n\nFor some language features, ty can also understand conditionals based on comparisons\nwith `sys.version_info`. These are commonly found in typeshed, for example,\nto reflect the differing contents of the standard library across Python versions.", "anyOf": [ { - "$ref": "#/$defs/PythonVersion" + "$ref": "#/definitions/PythonVersion" }, { "type": "null" @@ -118,14 +118,14 @@ "null" ], "items": { - "$ref": "#/$defs/RelativePathBuf" + "$ref": "#/definitions/RelativePathBuf" } }, "typeshed": { "description": "Optional path to a \"typeshed\" directory on disk for us to use for standard-library types.\nIf this is not provided, we will fallback to our vendored typeshed stubs for the stdlib,\nbundled as a zip file in the binary", "anyOf": [ { - "$ref": "#/$defs/RelativePathBuf" + "$ref": "#/definitions/RelativePathBuf" }, { "type": "null" @@ -189,7 +189,7 @@ "description": "A list of file and directory patterns to exclude from this override.\n\nPatterns follow a syntax similar to `.gitignore`.\nExclude patterns take precedence over include patterns within the same override.\n\nIf not specified, defaults to `[]` (excludes no files).", "anyOf": [ { - "$ref": "#/$defs/Array_of_string" + "$ref": "#/definitions/Array_of_string" }, { "type": "null" @@ -200,7 +200,7 @@ "description": "A list of file and directory patterns to include for this override.\n\nThe `include` option follows a similar syntax to `.gitignore` but reversed:\nIncluding a file or directory will make it so that it (and its contents)\nare affected by this override.\n\nIf not specified, defaults to `[\"**\"]` (matches all files).", "anyOf": [ { - "$ref": "#/$defs/Array_of_string" + "$ref": "#/definitions/Array_of_string" }, { "type": "null" @@ -211,7 +211,7 @@ "description": "Rule overrides for files matching the include/exclude patterns.\n\nThese rules will be merged with the global rules, with override rules\ntaking precedence for matching files. You can set rules to different\nseverity levels or disable them entirely.", "anyOf": [ { - "$ref": "#/$defs/Rules" + "$ref": "#/definitions/Rules" }, { "type": "null" @@ -225,7 +225,7 @@ "description": "Configuration override that applies to specific files based on glob patterns.\n\nAn override allows you to apply different rule configurations to specific\nfiles or directories. 
Multiple overrides can match the same file, with\nlater overrides take precedence.\n\n### Precedence\n\n- Later overrides in the array take precedence over earlier ones\n- Override rules take precedence over global rules for matching files\n\n### Examples\n\n```toml\n# Relax rules for test files\n[[tool.ty.overrides]]\ninclude = [\"tests/**\", \"**/test_*.py\"]\n\n[tool.ty.overrides.rules]\npossibly-unresolved-reference = \"warn\"\n\n# Ignore generated files but still check important ones\n[[tool.ty.overrides]]\ninclude = [\"generated/**\"]\nexclude = [\"generated/important.py\"]\n\n[tool.ty.overrides.rules]\npossibly-unresolved-reference = \"ignore\"\n```", "type": "array", "items": { - "$ref": "#/$defs/OverrideOptions" + "$ref": "#/definitions/OverrideOptions" } }, "PythonPlatform": { @@ -256,7 +256,7 @@ "anyOf": [ { "type": "string", - "pattern": "^\\\\d+\\\\.\\\\d+$" + "pattern": "^\\d+\\.\\d+$" }, { "description": "Python 3.7", @@ -294,7 +294,11 @@ }, "RelativePathBuf": { "description": "A possibly relative path in a configuration file.\n\nRelative paths in configuration files or from CLI options\nrequire different anchoring:\n\n* CLI: The path is relative to the current working directory\n* Configuration file: The path is relative to the project's root.", - "$ref": "#/$defs/SystemPathBuf" + "allOf": [ + { + "$ref": "#/definitions/SystemPathBuf" + } + ] }, "Rules": { "type": "object", @@ -305,7 +309,7 @@ "default": "warn", "oneOf": [ { - "$ref": "#/$defs/Level" + "$ref": "#/definitions/Level" } ] }, @@ -315,7 +319,7 @@ "default": "error", "oneOf": [ { - "$ref": "#/$defs/Level" + "$ref": "#/definitions/Level" } ] }, @@ -325,7 +329,7 @@ "default": "error", "oneOf": [ { - "$ref": "#/$defs/Level" + "$ref": "#/definitions/Level" } ] }, @@ -335,7 +339,7 @@ "default": "error", "oneOf": [ { - "$ref": "#/$defs/Level" + "$ref": "#/definitions/Level" } ] }, @@ -345,7 +349,7 @@ "default": "error", "oneOf": [ { - "$ref": "#/$defs/Level" + "$ref": "#/definitions/Level" } ] }, @@ -355,7 +359,7 @@ "default": "error", "oneOf": [ { - "$ref": "#/$defs/Level" + "$ref": "#/definitions/Level" } ] }, @@ -365,7 +369,7 @@ "default": "error", "oneOf": [ { - "$ref": "#/$defs/Level" + "$ref": "#/definitions/Level" } ] }, @@ -375,7 +379,7 @@ "default": "warn", "oneOf": [ { - "$ref": "#/$defs/Level" + "$ref": "#/definitions/Level" } ] }, @@ -385,7 +389,7 @@ "default": "ignore", "oneOf": [ { - "$ref": "#/$defs/Level" + "$ref": "#/definitions/Level" } ] }, @@ -395,7 +399,7 @@ "default": "error", "oneOf": [ { - "$ref": "#/$defs/Level" + "$ref": "#/definitions/Level" } ] }, @@ -405,7 +409,7 @@ "default": "error", "oneOf": [ { - "$ref": "#/$defs/Level" + "$ref": "#/definitions/Level" } ] }, @@ -415,7 +419,7 @@ "default": "error", "oneOf": [ { - "$ref": "#/$defs/Level" + "$ref": "#/definitions/Level" } ] }, @@ -425,7 +429,7 @@ "default": "error", "oneOf": [ { - "$ref": "#/$defs/Level" + "$ref": "#/definitions/Level" } ] }, @@ -435,7 +439,7 @@ "default": "warn", "oneOf": [ { - "$ref": "#/$defs/Level" + "$ref": "#/definitions/Level" } ] }, @@ -445,7 +449,7 @@ "default": "error", "oneOf": [ { - "$ref": "#/$defs/Level" + "$ref": "#/definitions/Level" } ] }, @@ -455,7 +459,7 @@ "default": "error", "oneOf": [ { - "$ref": "#/$defs/Level" + "$ref": "#/definitions/Level" } ] }, @@ -465,7 +469,7 @@ "default": "error", "oneOf": [ { - "$ref": "#/$defs/Level" + "$ref": "#/definitions/Level" } ] }, @@ -475,7 +479,7 @@ "default": "error", "oneOf": [ { - "$ref": "#/$defs/Level" + "$ref": "#/definitions/Level" } ] }, @@ -485,7 +489,7 @@ 
"default": "error", "oneOf": [ { - "$ref": "#/$defs/Level" + "$ref": "#/definitions/Level" } ] }, @@ -495,7 +499,7 @@ "default": "error", "oneOf": [ { - "$ref": "#/$defs/Level" + "$ref": "#/definitions/Level" } ] }, @@ -505,7 +509,7 @@ "default": "error", "oneOf": [ { - "$ref": "#/$defs/Level" + "$ref": "#/definitions/Level" } ] }, @@ -515,7 +519,7 @@ "default": "error", "oneOf": [ { - "$ref": "#/$defs/Level" + "$ref": "#/definitions/Level" } ] }, @@ -525,7 +529,7 @@ "default": "error", "oneOf": [ { - "$ref": "#/$defs/Level" + "$ref": "#/definitions/Level" } ] }, @@ -535,7 +539,7 @@ "default": "error", "oneOf": [ { - "$ref": "#/$defs/Level" + "$ref": "#/definitions/Level" } ] }, @@ -545,7 +549,7 @@ "default": "error", "oneOf": [ { - "$ref": "#/$defs/Level" + "$ref": "#/definitions/Level" } ] }, @@ -555,7 +559,7 @@ "default": "error", "oneOf": [ { - "$ref": "#/$defs/Level" + "$ref": "#/definitions/Level" } ] }, @@ -565,7 +569,7 @@ "default": "error", "oneOf": [ { - "$ref": "#/$defs/Level" + "$ref": "#/definitions/Level" } ] }, @@ -575,7 +579,7 @@ "default": "warn", "oneOf": [ { - "$ref": "#/$defs/Level" + "$ref": "#/definitions/Level" } ] }, @@ -585,7 +589,7 @@ "default": "error", "oneOf": [ { - "$ref": "#/$defs/Level" + "$ref": "#/definitions/Level" } ] }, @@ -595,7 +599,7 @@ "default": "error", "oneOf": [ { - "$ref": "#/$defs/Level" + "$ref": "#/definitions/Level" } ] }, @@ -605,7 +609,7 @@ "default": "error", "oneOf": [ { - "$ref": "#/$defs/Level" + "$ref": "#/definitions/Level" } ] }, @@ -615,7 +619,7 @@ "default": "error", "oneOf": [ { - "$ref": "#/$defs/Level" + "$ref": "#/definitions/Level" } ] }, @@ -625,7 +629,7 @@ "default": "error", "oneOf": [ { - "$ref": "#/$defs/Level" + "$ref": "#/definitions/Level" } ] }, @@ -635,7 +639,7 @@ "default": "error", "oneOf": [ { - "$ref": "#/$defs/Level" + "$ref": "#/definitions/Level" } ] }, @@ -645,7 +649,7 @@ "default": "error", "oneOf": [ { - "$ref": "#/$defs/Level" + "$ref": "#/definitions/Level" } ] }, @@ -655,7 +659,7 @@ "default": "error", "oneOf": [ { - "$ref": "#/$defs/Level" + "$ref": "#/definitions/Level" } ] }, @@ -665,7 +669,7 @@ "default": "error", "oneOf": [ { - "$ref": "#/$defs/Level" + "$ref": "#/definitions/Level" } ] }, @@ -675,7 +679,7 @@ "default": "error", "oneOf": [ { - "$ref": "#/$defs/Level" + "$ref": "#/definitions/Level" } ] }, @@ -685,7 +689,7 @@ "default": "error", "oneOf": [ { - "$ref": "#/$defs/Level" + "$ref": "#/definitions/Level" } ] }, @@ -695,7 +699,7 @@ "default": "error", "oneOf": [ { - "$ref": "#/$defs/Level" + "$ref": "#/definitions/Level" } ] }, @@ -705,7 +709,7 @@ "default": "error", "oneOf": [ { - "$ref": "#/$defs/Level" + "$ref": "#/definitions/Level" } ] }, @@ -715,7 +719,7 @@ "default": "error", "oneOf": [ { - "$ref": "#/$defs/Level" + "$ref": "#/definitions/Level" } ] }, @@ -725,7 +729,7 @@ "default": "error", "oneOf": [ { - "$ref": "#/$defs/Level" + "$ref": "#/definitions/Level" } ] }, @@ -735,7 +739,7 @@ "default": "error", "oneOf": [ { - "$ref": "#/$defs/Level" + "$ref": "#/definitions/Level" } ] }, @@ -745,7 +749,7 @@ "default": "error", "oneOf": [ { - "$ref": "#/$defs/Level" + "$ref": "#/definitions/Level" } ] }, @@ -755,7 +759,7 @@ "default": "error", "oneOf": [ { - "$ref": "#/$defs/Level" + "$ref": "#/definitions/Level" } ] }, @@ -765,7 +769,7 @@ "default": "error", "oneOf": [ { - "$ref": "#/$defs/Level" + "$ref": "#/definitions/Level" } ] }, @@ -775,7 +779,7 @@ "default": "error", "oneOf": [ { - "$ref": "#/$defs/Level" + "$ref": "#/definitions/Level" } ] }, @@ -785,7 +789,7 @@ "default": 
"error", "oneOf": [ { - "$ref": "#/$defs/Level" + "$ref": "#/definitions/Level" } ] }, @@ -795,7 +799,7 @@ "default": "error", "oneOf": [ { - "$ref": "#/$defs/Level" + "$ref": "#/definitions/Level" } ] }, @@ -805,7 +809,7 @@ "default": "error", "oneOf": [ { - "$ref": "#/$defs/Level" + "$ref": "#/definitions/Level" } ] }, @@ -815,7 +819,7 @@ "default": "error", "oneOf": [ { - "$ref": "#/$defs/Level" + "$ref": "#/definitions/Level" } ] }, @@ -825,7 +829,7 @@ "default": "warn", "oneOf": [ { - "$ref": "#/$defs/Level" + "$ref": "#/definitions/Level" } ] }, @@ -835,7 +839,7 @@ "default": "warn", "oneOf": [ { - "$ref": "#/$defs/Level" + "$ref": "#/definitions/Level" } ] }, @@ -845,7 +849,7 @@ "default": "warn", "oneOf": [ { - "$ref": "#/$defs/Level" + "$ref": "#/definitions/Level" } ] }, @@ -855,7 +859,7 @@ "default": "ignore", "oneOf": [ { - "$ref": "#/$defs/Level" + "$ref": "#/definitions/Level" } ] }, @@ -865,7 +869,7 @@ "default": "error", "oneOf": [ { - "$ref": "#/$defs/Level" + "$ref": "#/definitions/Level" } ] }, @@ -875,7 +879,7 @@ "default": "warn", "oneOf": [ { - "$ref": "#/$defs/Level" + "$ref": "#/definitions/Level" } ] }, @@ -885,7 +889,7 @@ "default": "error", "oneOf": [ { - "$ref": "#/$defs/Level" + "$ref": "#/definitions/Level" } ] }, @@ -895,7 +899,7 @@ "default": "error", "oneOf": [ { - "$ref": "#/$defs/Level" + "$ref": "#/definitions/Level" } ] }, @@ -905,7 +909,7 @@ "default": "error", "oneOf": [ { - "$ref": "#/$defs/Level" + "$ref": "#/definitions/Level" } ] }, @@ -915,7 +919,7 @@ "default": "error", "oneOf": [ { - "$ref": "#/$defs/Level" + "$ref": "#/definitions/Level" } ] }, @@ -925,7 +929,7 @@ "default": "error", "oneOf": [ { - "$ref": "#/$defs/Level" + "$ref": "#/definitions/Level" } ] }, @@ -935,7 +939,7 @@ "default": "warn", "oneOf": [ { - "$ref": "#/$defs/Level" + "$ref": "#/definitions/Level" } ] }, @@ -945,7 +949,7 @@ "default": "error", "oneOf": [ { - "$ref": "#/$defs/Level" + "$ref": "#/definitions/Level" } ] }, @@ -955,7 +959,7 @@ "default": "error", "oneOf": [ { - "$ref": "#/$defs/Level" + "$ref": "#/definitions/Level" } ] }, @@ -965,7 +969,7 @@ "default": "warn", "oneOf": [ { - "$ref": "#/$defs/Level" + "$ref": "#/definitions/Level" } ] }, @@ -975,7 +979,7 @@ "default": "error", "oneOf": [ { - "$ref": "#/$defs/Level" + "$ref": "#/definitions/Level" } ] }, @@ -985,7 +989,7 @@ "default": "error", "oneOf": [ { - "$ref": "#/$defs/Level" + "$ref": "#/definitions/Level" } ] }, @@ -995,7 +999,7 @@ "default": "warn", "oneOf": [ { - "$ref": "#/$defs/Level" + "$ref": "#/definitions/Level" } ] }, @@ -1005,7 +1009,7 @@ "default": "error", "oneOf": [ { - "$ref": "#/$defs/Level" + "$ref": "#/definitions/Level" } ] }, @@ -1015,7 +1019,7 @@ "default": "error", "oneOf": [ { - "$ref": "#/$defs/Level" + "$ref": "#/definitions/Level" } ] }, @@ -1025,7 +1029,7 @@ "default": "ignore", "oneOf": [ { - "$ref": "#/$defs/Level" + "$ref": "#/definitions/Level" } ] }, @@ -1035,7 +1039,7 @@ "default": "warn", "oneOf": [ { - "$ref": "#/$defs/Level" + "$ref": "#/definitions/Level" } ] }, @@ -1045,13 +1049,13 @@ "default": "error", "oneOf": [ { - "$ref": "#/$defs/Level" + "$ref": "#/definitions/Level" } ] } }, "additionalProperties": { - "$ref": "#/$defs/Level" + "$ref": "#/definitions/Level" } }, "SrcOptions": { @@ -1061,7 +1065,7 @@ "description": "A list of file and directory patterns to exclude from type checking.\n\nPatterns follow a syntax similar to `.gitignore`:\n\n- `./src/` matches only a directory\n- `./src` matches both files and directories\n- `src` matches files or directories 
named `src`\n- `*` matches any (possibly empty) sequence of characters (except `/`).\n- `**` matches zero or more path components.\n This sequence **must** form a single path component, so both `**a` and `b**` are invalid and will result in an error.\n A sequence of more than two consecutive `*` characters is also invalid.\n- `?` matches any single character except `/`\n- `[abc]` matches any character inside the brackets. Character sequences can also specify ranges of characters, as ordered by Unicode,\n so e.g. `[0-9]` specifies any character between `0` and `9` inclusive. An unclosed bracket is invalid.\n- `!pattern` negates a pattern (undoes the exclusion of files that would otherwise be excluded)\n\nAll paths are anchored relative to the project root (`src` only\nmatches `/src` and not `/test/src`).\nTo exclude any directory or file named `src`, use `**/src` instead.\n\nBy default, ty excludes commonly ignored directories:\n\n- `**/.bzr/`\n- `**/.direnv/`\n- `**/.eggs/`\n- `**/.git/`\n- `**/.git-rewrite/`\n- `**/.hg/`\n- `**/.mypy_cache/`\n- `**/.nox/`\n- `**/.pants.d/`\n- `**/.pytype/`\n- `**/.ruff_cache/`\n- `**/.svn/`\n- `**/.tox/`\n- `**/.venv/`\n- `**/__pypackages__/`\n- `**/_build/`\n- `**/buck-out/`\n- `**/dist/`\n- `**/node_modules/`\n- `**/venv/`\n\nYou can override any default exclude by using a negated pattern. For example,\nto re-include `dist` use `exclude = [\"!dist\"]`", "anyOf": [ { - "$ref": "#/$defs/Array_of_string" + "$ref": "#/definitions/Array_of_string" }, { "type": "null" @@ -1072,7 +1076,7 @@ "description": "A list of files and directories to check. The `include` option\nfollows a similar syntax to `.gitignore` but reversed:\nIncluding a file or directory will make it so that it (and its contents)\nare type checked.\n\n- `./src/` matches only a directory\n- `./src` matches both files and directories\n- `src` matches a file or directory named `src`\n- `*` matches any (possibly empty) sequence of characters (except `/`).\n- `**` matches zero or more path components.\n This sequence **must** form a single path component, so both `**a` and `b**` are invalid and will result in an error.\n A sequence of more than two consecutive `*` characters is also invalid.\n- `?` matches any single character except `/`\n- `[abc]` matches any character inside the brackets. Character sequences can also specify ranges of characters, as ordered by Unicode,\n so e.g. `[0-9]` specifies any character between `0` and `9` inclusive. An unclosed bracket is invalid.\n\nAll paths are anchored relative to the project root (`src` only\nmatches `/src` and not `/test/src`).\n\n`exclude` takes precedence over `include`.", "anyOf": [ { - "$ref": "#/$defs/Array_of_string" + "$ref": "#/definitions/Array_of_string" }, { "type": "null" @@ -1090,7 +1094,7 @@ "description": "The root of the project, used for finding first-party modules.\n\nIf left unspecified, ty will try to detect common project layouts and initialize `src.root` accordingly:\n\n* if a `./src` directory exists, include `.` and `./src` in the first party search path (src layout or flat)\n* if a `.//` directory exists, include `.` and `./` in the first party search path\n* otherwise, default to `.` (flat layout)\n\nBesides, if a `./tests` directory exists and is not a package (i.e. 
it does not contain an `__init__.py` file),\nit will also be included in the first party search path.", "anyOf": [ { - "$ref": "#/$defs/RelativePathBuf" + "$ref": "#/definitions/RelativePathBuf" }, { "type": "null" @@ -1119,7 +1123,7 @@ "description": "The format to use for printing diagnostic messages.\n\nDefaults to `full`.", "anyOf": [ { - "$ref": "#/$defs/OutputFormat" + "$ref": "#/definitions/OutputFormat" }, { "type": "null" From 7711a64e2014ba28140e7e79ff9b6cd8705e1c44 Mon Sep 17 00:00:00 2001 From: Takayuki Maeda Date: Mon, 20 Oct 2025 15:46:14 +0900 Subject: [PATCH 4/4] remove unnecessary attrs --- crates/ruff_workspace/src/options.rs | 2 -- crates/ty_python_semantic/src/python_platform.rs | 1 + 2 files changed, 1 insertion(+), 2 deletions(-) diff --git a/crates/ruff_workspace/src/options.rs b/crates/ruff_workspace/src/options.rs index 5fd4cf0e626a94..47ee0fe738ddd2 100644 --- a/crates/ruff_workspace/src/options.rs +++ b/crates/ruff_workspace/src/options.rs @@ -601,7 +601,6 @@ impl schemars::JsonSchema for DeprecatedTopLevelLintOptions { // Don't add any new options to this struct. Add them to [`LintOptions`] directly to avoid exposing them in the // global settings. #[cfg_attr(feature = "schemars", derive(schemars::JsonSchema))] -#[cfg_attr(feature = "schemars", schemars(inline))] #[derive( Clone, Debug, PartialEq, Eq, Default, OptionsMetadata, CombineOptions, Serialize, Deserialize, )] @@ -3890,7 +3889,6 @@ pub struct AnalyzeOptions { /// Like [`LintCommonOptions`], but with any `#[serde(flatten)]` fields inlined. This leads to far, /// far better error messages when deserializing. -#[cfg_attr(feature = "schemars", derive(schemars::JsonSchema))] #[derive(Debug, Clone, Default, Deserialize)] #[serde(rename_all = "kebab-case", deny_unknown_fields)] pub struct LintOptionsWire { diff --git a/crates/ty_python_semantic/src/python_platform.rs b/crates/ty_python_semantic/src/python_platform.rs index 017cc4d21b60d9..b21424ee33b086 100644 --- a/crates/ty_python_semantic/src/python_platform.rs +++ b/crates/ty_python_semantic/src/python_platform.rs @@ -76,6 +76,7 @@ mod schema { schema.into() } + // Hard code some well known values, but allow any other string as well. let mut any_of = vec![schemars::json_schema!({ "type": "string" }).into()]; // Promote well-known values for better auto-completion. // Using `const` over `enumValues` as recommended [here](https://github.com/SchemaStore/schemastore/blob/master/CONTRIBUTING.md#documenting-enums).
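
Editor's note: for readers who want to see the two schemars v1 patterns used in this series outside the patch context, a minimal, self-contained sketch follows. It is illustrative only and not part of the change set: `DemoOptions` and the platform values are placeholders standing in for the real ruff/ty option types, and only APIs already exercised above (`SchemaSettings::draft07`, `into_generator`, `into_root_schema_for`, `json_schema!`) are assumed.

use schemars::generate::SchemaSettings;

#[derive(schemars::JsonSchema)]
struct DemoOptions {
    /// An optional free-form name, just to give the schema a property.
    name: Option<String>,
}

fn main() {
    // Pattern from PATCH 4/4: accept any string, but promote a few well-known
    // values as `const` alternatives (each with its own description) so that
    // editors can offer auto-completion, as the SchemaStore guide recommends.
    let platform = schemars::json_schema!({
        "anyOf": [
            { "type": "string" },
            { "const": "all", "description": "Do not make any assumptions about the target platform." },
            { "const": "linux", "description": "Linux" }
        ]
    });
    println!("{}", serde_json::to_string_pretty(&platform).unwrap());

    // Pattern from PATCH 2/4: build a draft-07 generator instead of using the
    // `schema_for!` macro, mirroring generate_json_schema.rs above.
    let generator = SchemaSettings::draft07().into_generator();
    let schema = generator.into_root_schema_for::<DemoOptions>();
    println!("{}", serde_json::to_string_pretty(&schema).unwrap());
}

Using `const` entries rather than a single `enum` list is what lets each well-known value carry its own description in the generated ruff.schema.json and ty.schema.json.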